
Commit 8db9b97

Add new ILlmFunction.ISeparated.validate() function. (#155)
Validator function for the separated arguments case.
1 parent 87007ed commit 8db9b97

9 files changed  (+190 -17)

package.json  (+3 -3)

@@ -1,6 +1,6 @@
 {
   "name": "@samchon/openapi",
-  "version": "3.0.0",
+  "version": "3.1.0",
   "description": "OpenAPI definitions and converters for 'typia' and 'nestia'.",
   "main": "./lib/index.js",
   "module": "./lib/index.mjs",
@@ -46,7 +46,7 @@
     "@anthropic-ai/sdk": "^0.32.1",
     "@google/generative-ai": "^0.21.0",
     "@nestia/core": "4.2.0",
-    "@nestia/e2e": "0.8.2",
+    "@nestia/e2e": "0.8.3",
     "@nestia/fetcher": "4.2.0",
     "@nestia/sdk": "4.2.0",
     "@nestjs/common": "^10.4.1",
@@ -82,7 +82,7 @@
     "typedoc-github-theme": "^0.2.1",
     "typescript": "~5.7.2",
     "typescript-transform-paths": "^3.5.2",
-    "typia": "7.6.4",
+    "typia": "^8.0.0",
     "uuid": "^10.0.0"
   },
   "sideEffects": false,

src/HttpLlm.ts  (+6 -5)

@@ -194,11 +194,11 @@ export namespace HttpLlm {
   /**
    * Properties for the parameters' merging.
    */
-  export interface IMergeProps {
+  export interface IMergeProps<Model extends ILlmSchema.Model> {
     /**
      * Metadata of the target function.
      */
-    function: ILlmFunction<any>;
+    function: ILlmFunction<Model>;

     /**
      * Arguments composed by the LLM.
@@ -217,7 +217,7 @@ export namespace HttpLlm {
    * If you've configured the {@link IHttpLlmApplication.IOptions.separate} option,
    * so that the parameters are separated to human and LLM sides, you can merge these
    * human and LLM sides' parameters into one through this `HttpLlm.mergeParameters()`
-   * function before the actual LLM function call wexecution.
+   * function before the actual LLM function call execution.
    *
    * On contrary, if you've not configured the
    * {@link IHttpLlmApplication.IOptions.separate} option, this function would throw
@@ -226,8 +226,9 @@ export namespace HttpLlm {
    * @param props Properties for the parameters' merging
    * @returns Merged parameter values
    */
-  export const mergeParameters = (props: IMergeProps): object =>
-    LlmDataMerger.parameters(props);
+  export const mergeParameters = <Model extends ILlmSchema.Model>(
+    props: IMergeProps<Model>,
+  ): object => LlmDataMerger.parameters(props);

   /**
    * Merge two values.
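
For reference, a minimal usage sketch of the now-generic `HttpLlm.mergeParameters()` described above. The selected function and all argument values are illustrative, and the `llm` / `human` property names are assumed to follow `LlmDataMerger.IProps` as shown in this commit.

import { HttpLlm, IHttpLlmApplication } from "@samchon/openapi";

// Sketch: merge the separated argument sides before the actual function call.
export const mergeSeparatedArguments = (
  application: IHttpLlmApplication<"chatgpt">,
): object => {
  const func = application.functions[0]; // hypothetical target function
  return HttpLlm.mergeParameters({
    function: func,
    llm: { title: "Hello world" },    // side composed by the LLM (example)
    human: { secretKey: "********" }, // side composed by the human (example)
  });
};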

src/composers/llm/ChatGptSchemaComposer.ts  (+13)

@@ -8,6 +8,7 @@ import { ChatGptTypeChecker } from "../../utils/ChatGptTypeChecker";
 import { LlmTypeCheckerV3_1 } from "../../utils/LlmTypeCheckerV3_1";
 import { NamingConvention } from "../../utils/NamingConvention";
 import { OpenApiTypeChecker } from "../../utils/OpenApiTypeChecker";
+import { OpenApiValidator } from "../../utils/OpenApiValidator";
 import { LlmDescriptionInverter } from "./LlmDescriptionInverter";
 import { LlmSchemaV3_1Composer } from "./LlmSchemaV3_1Composer";

@@ -271,6 +272,18 @@ export namespace ChatGptSchemaComposer {
     for (const key of Object.keys(props.parameters.$defs))
       if (key.endsWith(".Llm") === false && key.endsWith(".Human") === false)
         delete props.parameters.$defs[key];
+    if (Object.keys(output.llm.properties).length !== 0) {
+      const components: OpenApi.IComponents = {};
+      output.validate = OpenApiValidator.create({
+        components,
+        schema: invert({
+          components,
+          schema: output.llm,
+          $defs: output.llm.$defs,
+        }),
+        required: true,
+      });
+    }
     return output;
   };

src/composers/llm/LlmSchemaV3Composer.ts  (+8)

@@ -8,6 +8,7 @@ import { IResult } from "../../structures/IResult";
 import { LlmTypeCheckerV3 } from "../../utils/LlmTypeCheckerV3";
 import { OpenApiConstraintShifter } from "../../utils/OpenApiConstraintShifter";
 import { OpenApiTypeChecker } from "../../utils/OpenApiTypeChecker";
+import { OpenApiValidator } from "../../utils/OpenApiValidator";
 import { LlmDescriptionInverter } from "./LlmDescriptionInverter";
 import { LlmParametersFinder } from "./LlmParametersComposer";

@@ -186,6 +187,13 @@ export namespace LlmSchemaV3Composer {
         required: [],
       },
       human: human as ILlmSchemaV3.IParameters | null,
+      validate: llm
+        ? OpenApiValidator.create({
+            components: {},
+            schema: invert({ schema: llm }),
+            required: true,
+          })
+        : undefined,
     };
   };

src/composers/llm/LlmSchemaV3_1Composer.ts  (+13)

@@ -7,6 +7,7 @@ import { LlmTypeCheckerV3_1 } from "../../utils/LlmTypeCheckerV3_1";
 import { NamingConvention } from "../../utils/NamingConvention";
 import { OpenApiConstraintShifter } from "../../utils/OpenApiConstraintShifter";
 import { OpenApiTypeChecker } from "../../utils/OpenApiTypeChecker";
+import { OpenApiValidator } from "../../utils/OpenApiValidator";
 import { JsonDescriptionUtil } from "../../utils/internal/JsonDescriptionUtil";
 import { LlmDescriptionInverter } from "./LlmDescriptionInverter";
 import { LlmParametersFinder } from "./LlmParametersComposer";
@@ -384,6 +385,18 @@ export namespace LlmSchemaV3_1Composer {
     for (const key of Object.keys(props.parameters.$defs))
       if (key.endsWith(".Llm") === false && key.endsWith(".Human") === false)
         delete props.parameters.$defs[key];
+    if (Object.keys(output.llm.properties).length !== 0) {
+      const components: OpenApi.IComponents = {};
+      output.validate = OpenApiValidator.create({
+        components,
+        schema: invert({
+          components,
+          schema: output.llm,
+          $defs: output.llm.$defs,
+        }),
+        required: true,
+      });
+    }
     return output;
   };

src/structures/IHttpLlmFunction.ts  (+37 -2)

@@ -193,8 +193,16 @@ export interface IHttpLlmFunction<Model extends ILlmSchema.Model> {
    * And in my experience, OpenAI's `gpt-4o-mini` model tends to construct
    * invalid function calling arguments at the first trial about 50% of
    * the time. However, if you correct it through this `validate` function,
-   * the success rate soars to 99% at the second trial, and I've never failed
-   * at the third trial.
+   * the success rate soars to 99% at the second trial, and I've never
+   * failed at the third trial.
+   *
+   * > If you've {@link separated} parameters, use the
+   * > {@link IHttpLlmFunction.ISeparated.validate} function instead when
+   * > validating the LLM composed arguments.
+   * >
+   * > In that case, this `validate` function would be meaningful only
+   * > when you've merged the LLM and human composed arguments by the
+   * > {@link HttpLlm.mergeParameters} function.
    *
    * @param args Arguments to validate.
    * @returns Validation result
@@ -236,5 +244,32 @@ export namespace IHttpLlmFunction {
      * Parameters that would be composed by the human.
      */
     human: ILlmSchema.ModelParameters[Model] | null;
+
+    /**
+     * Validate function of the separated arguments.
+     *
+     * If the LLM part of the separated parameters has some properties,
+     * this `validate` function will be filled for the {@link llm}
+     * type validation.
+     *
+     * > You know what? LLM (Large Language Model) like OpenAI makes a lot of
+     * > mistakes when composing arguments in function calling. Even though
+     * > `number` like simple type is defined in the {@link parameters} schema,
+     * > LLM often fills it just by a `string` typed value.
+     * >
+     * > In that case, you have to give a validation feedback to the LLM by
+     * > using this `validate` function. The `validate` function will return
+     * > detailed information about every type error in the arguments.
+     * >
+     * > And in my experience, OpenAI's `gpt-4o-mini` model tends to construct
+     * > invalid function calling arguments at the first trial about 50% of
+     * > the time. However, if you correct it through this `validate` function,
+     * > the success rate soars to 99% at the second trial, and I've never
+     * > failed at the third trial.
+     *
+     * @param args Arguments to validate
+     * @returns Validation result
+     */
+    validate?: ((args: unknown) => IValidation<unknown>) | undefined;
   }
 }
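
A minimal sketch of the validation-feedback loop that the new `IHttpLlmFunction.ISeparated.validate` member enables. The `askLlmAgain()` callback is a hypothetical helper (not part of `@samchon/openapi`) that re-prompts the model with the reported type errors, and `IValidation` is assumed to follow the usual success/errors shape.

import { IHttpLlmFunction, IValidation } from "@samchon/openapi";

// Validate only the LLM-composed side of separated parameters, and hand the
// detailed type errors back to the model for a retry when validation fails.
export const validateLlmSide = async (
  func: IHttpLlmFunction<"chatgpt">,
  llmArguments: unknown,
  askLlmAgain: (errors: IValidation.IError[]) => Promise<unknown>,
): Promise<unknown> => {
  const validate = func.separated?.validate;
  if (validate === undefined) return llmArguments; // nothing for the LLM to fill
  const result: IValidation<unknown> = validate(llmArguments);
  return result.success ? result.data : askLlmAgain(result.errors);
};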

src/structures/ILlmFunction.ts  (+39 -4)

@@ -76,7 +76,7 @@ export interface ILlmFunction<Model extends ILlmSchema.Model> {
    *
    * You can fill this property by the `@tag ${name}` comment tag.
    */
-  tags?: string[];
+  tags?: string[] | undefined;

   /**
    * Validate function of the arguments.
@@ -93,10 +93,18 @@ export interface ILlmFunction<Model extends ILlmSchema.Model> {
    * And in my experience, OpenAI's `gpt-4o-mini` model tends to construct
    * invalid function calling arguments at the first trial about 50% of
    * the time. However, if you correct it through this `validate` function,
-   * the success rate soars to 99% at the second trial, and I've never failed
-   * at the third trial.
+   * the success rate soars to 99% at the second trial, and I've never
+   * failed at the third trial.
    *
-   * @param args Arguments to validate.
+   * > If you've {@link separated} parameters, use the
+   * > {@link ILlmFunction.ISeparated.validate} function instead when
+   * > validating the LLM composed arguments.
+   * >
+   * > In that case, this `validate` function would be meaningful only
+   * > when you've merged the LLM and human composed arguments by the
+   * > {@link HttpLlm.mergeParameters} function.
+   *
+   * @param args Arguments to validate
    * @returns Validation result
    */
   validate: (args: unknown) => IValidation<unknown>;
@@ -118,5 +126,32 @@ export namespace ILlmFunction {
      * Parameters that would be composed by the human.
      */
     human: ILlmSchema.ModelParameters[Model] | null;
+
+    /**
+     * Validate function of the separated arguments.
+     *
+     * If the LLM part of the separated parameters has some properties,
+     * this `validate` function will be filled for the {@link llm}
+     * type validation.
+     *
+     * > You know what? LLM (Large Language Model) like OpenAI makes a lot of
+     * > mistakes when composing arguments in function calling. Even though
+     * > `number` like simple type is defined in the {@link parameters} schema,
+     * > LLM often fills it just by a `string` typed value.
+     * >
+     * > In that case, you have to give a validation feedback to the LLM by
+     * > using this `validate` function. The `validate` function will return
+     * > detailed information about every type error in the arguments.
+     * >
+     * > And in my experience, OpenAI's `gpt-4o-mini` model tends to construct
+     * > invalid function calling arguments at the first trial about 50% of
+     * > the time. However, if you correct it through this `validate` function,
+     * > the success rate soars to 99% at the second trial, and I've never
+     * > failed at the third trial.
+     *
+     * @param args Arguments to validate
+     * @returns Validation result
+     */
+    validate?: ((args: unknown) => IValidation<unknown>) | undefined;
   }
 }
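
As the updated comments note, the two validators serve different stages once parameters are separated. Below is a sketch of using both, with the merge done through `HttpLlm.mergeParameters()`; the argument values and the `llm` / `human` property names are assumptions for illustration.

import { HttpLlm, ILlmFunction, IValidation } from "@samchon/openapi";

// Stage 1: check what the LLM composed (ISeparated.validate, new in this commit).
// Stage 2: after merging with the human side, check the full argument object
//          with the ordinary `validate` function.
export const checkBothStages = (
  func: ILlmFunction<"chatgpt">,
  llmArgs: object,
  humanArgs: object,
): IValidation<unknown> => {
  const llmCheck = func.separated?.validate?.(llmArgs);
  if (llmCheck !== undefined && llmCheck.success === false)
    return llmCheck; // give this failure back to the LLM as feedback
  const merged: object = HttpLlm.mergeParameters({
    function: func,
    llm: llmArgs,
    human: humanArgs,
  });
  return func.validate(merged);
};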

src/utils/LlmDataMerger.ts  (+5 -3)

@@ -10,11 +10,11 @@ export namespace LlmDataMerger {
   /**
    * Properties of {@link parameters} function.
    */
-  export interface IProps {
+  export interface IProps<Model extends ILlmSchema.Model> {
     /**
      * Target function to call.
      */
-    function: ILlmFunction<ILlmSchema.Model>;
+    function: ILlmFunction<Model>;

     /**
      * Arguments composed by LLM (Large Language Model).
@@ -42,7 +42,9 @@ export namespace LlmDataMerger {
    * @param props Properties to combine LLM and human arguments with metadata.
    * @returns Combined arguments
    */
-  export const parameters = (props: IProps): object => {
+  export const parameters = <Model extends ILlmSchema.Model>(
+    props: IProps<Model>,
+  ): object => {
     const separated = props.function.separated;
     if (separated === undefined)
       throw new Error(
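
`LlmDataMerger.parameters()` combines the LLM-composed and human-composed argument objects according to the function's `separated` metadata. The sketch below is not the library's implementation, only a conceptual deep-merge illustrating the kind of result it produces; the sample values and the "human side wins on conflicts" rule are assumptions.

// Conceptual deep-merge, NOT LlmDataMerger's actual algorithm.
const deepMerge = (llm: unknown, human: unknown): unknown => {
  if (
    typeof llm === "object" && llm !== null && !Array.isArray(llm) &&
    typeof human === "object" && human !== null && !Array.isArray(human)
  ) {
    const output: Record<string, unknown> = { ...(llm as object) };
    for (const [key, value] of Object.entries(human))
      output[key] = deepMerge(output[key], value);
    return output;
  }
  return human ?? llm; // human side wins for scalar conflicts (assumption)
};

// Example: { query: { name: "samchon" } } + { query: { secret: "********" } }
//       -> { query: { name: "samchon", secret: "********" } }
console.log(
  deepMerge({ query: { name: "samchon" } }, { query: { secret: "********" } }),
);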
New test file  (+66)

@@ -0,0 +1,66 @@
+import { TestValidator } from "@nestia/e2e";
+import {
+  ILlmFunction,
+  ILlmSchema,
+  IOpenApiSchemaError,
+  IResult,
+  OpenApi,
+  OpenApiTypeChecker,
+} from "@samchon/openapi";
+import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer";
+import typia from "typia";
+
+export const test_chatgpt_parameters_separate_validate = (): void =>
+  validate_llm_parameters_separate_validate("chatgpt");
+
+export const test_claude_parameters_separate_validate = (): void =>
+  validate_llm_parameters_separate_validate("claude");
+
+export const test_gemini_parameters_separate_validate = (): void =>
+  validate_llm_parameters_separate_validate("gemini");
+
+export const test_llama_parameters_separate_validate = (): void =>
+  validate_llm_parameters_separate_validate("llama");
+
+export const test_llm_v30_parameters_separate_validate = (): void =>
+  validate_llm_parameters_separate_validate("3.0");
+
+export const test_llm_v31_parameters_separate_validate = (): void =>
+  validate_llm_parameters_separate_validate("3.1");
+
+const validate_llm_parameters_separate_validate = <
+  Model extends ILlmSchema.Model,
+>(
+  model: Model,
+): void => {
+  const collection = typia.json.schemas<[ISeparatable, IHumanOnly]>();
+  const validate = (schema: OpenApi.IJsonSchema, exists: boolean) => {
+    const result: IResult<
+      ILlmSchema.IParameters<Model>,
+      IOpenApiSchemaError
+    > = LlmSchemaComposer.parameters(model)({
+      $defs: {},
+      components: collection.components,
+      schema: schema as OpenApi.IJsonSchema.IReference,
+      config: LlmSchemaComposer.defaultConfig(model),
+    } as any) as IResult<ILlmSchema.IParameters<Model>, IOpenApiSchemaError>;
+    if (result.success === false) throw new Error("Failed to convert");
+
+    const separated: ILlmFunction.ISeparated<Model> =
+      LlmSchemaComposer.separateParameters(model)({
+        parameters: result.value as ILlmSchema.IParameters<Model>,
+        predicate: (s: OpenApi.IJsonSchema) => OpenApiTypeChecker.isNumber(s),
+      } as any) as ILlmFunction.ISeparated<Model>;
+    TestValidator.equals("validate")(!!separated.validate)(exists);
+  };
+  validate(collection.schemas[0], true);
+  validate(collection.schemas[1], false);
+};
+
+interface ISeparatable {
+  title: string;
+  value: number;
+}
+interface IHumanOnly {
+  value: number;
+}

0 commit comments
