Skip to content

Commit 51d7531

Browse files
authored
Merge pull request #1434 from samchon/feat/chatgpt
Adapt samchon/openapi#112: ChatGPT strict mode configurable.
2 parents 0e7b90a + 829b866 commit 51d7531

File tree

843 files changed

+8785
-13812
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

843 files changed

+8785
-13812
lines changed

package.json

+3-3
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "typia",
3-
"version": "7.3.0",
3+
"version": "7.4.0",
44
"description": "Superfast runtime validators with only one line",
55
"main": "lib/index.js",
66
"typings": "lib/index.d.ts",
@@ -41,7 +41,7 @@
4141
},
4242
"homepage": "https://typia.io",
4343
"dependencies": {
44-
"@samchon/openapi": "^2.1.2",
44+
"@samchon/openapi": "^2.2.0",
4545
"commander": "^10.0.0",
4646
"comment-json": "^4.2.3",
4747
"inquirer": "^8.2.5",
@@ -50,7 +50,7 @@
5050
},
5151
"peerDependencies": {
5252
"typescript": ">=4.8.0 <5.8.0",
53-
"@samchon/openapi": ">=2.1.2 <3.0.0"
53+
"@samchon/openapi": ">=2.2.0 <3.0.0"
5454
},
5555
"devDependencies": {
5656
"@rollup/plugin-commonjs": "^26.0.1",

packages/typescript-json/package.json

+3-3
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "typescript-json",
3-
"version": "7.3.0-dev.20241213",
3+
"version": "7.4.0-dev.20241215",
44
"description": "Superfast runtime validators with only one line",
55
"main": "lib/index.js",
66
"typings": "lib/index.d.ts",
@@ -37,11 +37,11 @@
3737
},
3838
"homepage": "https://typia.io",
3939
"dependencies": {
40-
"typia": "7.3.0-dev.20241213"
40+
"typia": "7.4.0-dev.20241215"
4141
},
4242
"peerDependencies": {
4343
"typescript": ">=4.8.0 <5.8.0",
44-
"@samchon/openapi": ">=2.1.2 <3.0.0"
44+
"@samchon/openapi": ">=2.2.0 <3.0.0"
4545
},
4646
"stackblitz": {
4747
"startCommand": "npm install && npm run test"

src/programmers/llm/LlmApplicationOfValidateProgrammer.ts

+4-2
Original file line numberDiff line numberDiff line change
@@ -12,8 +12,10 @@ import { ValidateProgrammer } from "../ValidateProgrammer";
1212
import { LlmApplicationProgrammer } from "./LlmApplicationProgrammer";
1313

1414
export namespace LlmApplicationOfValidateProgrammer {
15-
export const validate = (model: ILlmSchema.Model) =>
16-
LlmApplicationProgrammer.validate(model);
15+
export const validate = <Model extends ILlmSchema.Model>(props: {
16+
model: Model;
17+
config?: Partial<ILlmSchema.ModelConfig[Model]>;
18+
}) => LlmApplicationProgrammer.validate(props);
1719

1820
export const write = <Model extends ILlmSchema.Model>(props: {
1921
context: ITypiaContext;

src/programmers/llm/LlmApplicationProgrammer.ts

+6-3
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,10 @@ import { JsonApplicationProgrammer } from "../json/JsonApplicationProgrammer";
1919
import { LlmSchemaProgrammer } from "./LlmSchemaProgrammer";
2020

2121
export namespace LlmApplicationProgrammer {
22-
export const validate = (model: ILlmSchema.Model) => {
22+
export const validate = <Model extends ILlmSchema.Model>(props: {
23+
model: Model;
24+
config?: Partial<ILlmSchema.ModelConfig[Model]>;
25+
}) => {
2326
let top: Metadata | undefined;
2427
return (
2528
metadata: Metadata,
@@ -36,7 +39,7 @@ export namespace LlmApplicationProgrammer {
3639
metadata.functions.length === 1
3740
)
3841
return validateFunction(explore.property, metadata.functions[0]!);
39-
else return LlmSchemaProgrammer.validate(model)(metadata);
42+
else return LlmSchemaProgrammer.validate(props)(metadata);
4043

4144
const output: string[] = [];
4245
const valid: boolean =
@@ -121,7 +124,7 @@ export namespace LlmApplicationProgrammer {
121124
metadata: Metadata;
122125
config?: Partial<ILlmSchema.ModelConfig[Model]>;
123126
}): ILlmApplication<Model> => {
124-
const errors: string[] = validate(props.model)(props.metadata, {
127+
const errors: string[] = validate(props)(props.metadata, {
125128
top: true,
126129
object: null,
127130
property: null,

src/programmers/llm/LlmParametersProgrammer.ts

+5-2
Original file line numberDiff line numberDiff line change
@@ -63,7 +63,10 @@ export namespace LlmParametersProgrammer {
6363
};
6464

6565
export const validate =
66-
(model: ILlmSchema.Model) =>
66+
<Model extends ILlmSchema.Model>(props: {
67+
model: Model;
68+
config?: Partial<ILlmSchema.ModelConfig[Model]>;
69+
}) =>
6770
(metadata: Metadata, explore: MetadataFactory.IExplore): string[] => {
6871
const output: string[] = [];
6972
if (explore.top === true) {
@@ -84,7 +87,7 @@ export namespace LlmParametersProgrammer {
8487
output.push("LLM parameters must be a non-undefined object type.");
8588
}
8689
}
87-
output.push(...LlmSchemaProgrammer.validate(model)(metadata));
90+
output.push(...LlmSchemaProgrammer.validate(props)(metadata));
8891
return output;
8992
};
9093
}

src/programmers/llm/LlmSchemaProgrammer.ts

+41-11
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,9 @@
1-
import { ILlmSchema, IOpenApiSchemaError, IResult } from "@samchon/openapi";
1+
import {
2+
IChatGptSchema,
3+
ILlmSchema,
4+
IOpenApiSchemaError,
5+
IResult,
6+
} from "@samchon/openapi";
27
import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer";
38

49
import { IJsonSchemaCollection } from "../../schemas/json/IJsonSchemaCollection";
@@ -60,25 +65,52 @@ export namespace LlmSchemaProgrammer {
6065
};
6166

6267
export const validate =
63-
(model: ILlmSchema.Model) =>
68+
<Model extends ILlmSchema.Model>(props: {
69+
model: ILlmSchema.Model;
70+
config?: Partial<ILlmSchema.ModelConfig[Model]>;
71+
}) =>
6472
(metadata: Metadata): string[] => {
6573
const output: string[] = [];
74+
75+
// no additionalProperties in ChatGPT strict mode or Gemini
6676
if (
67-
metadata.atomics.some((a) => a.type === "bigint") ||
68-
metadata.constants.some((c) => c.type === "bigint")
69-
)
70-
output.push("LLM schema does not support bigint type.");
71-
if (
72-
(model === "chatgpt" || model === "gemini") &&
77+
((props.model === "chatgpt" &&
78+
(props.config as Partial<IChatGptSchema.IConfig> | undefined)
79+
?.strict === true) ||
80+
props.model === "gemini") &&
7381
metadata.objects.some((o) =>
7482
o.type.properties.some(
7583
(p) => p.key.isSoleLiteral() === false && p.value.size() !== 0,
7684
),
7785
)
7886
)
7987
output.push(
80-
`LLM schema of "${model}" does not support dynamic property in object.`,
88+
`LLM schema of "${props.model}"${props.model === "chatgpt" ? " (strict mode)" : ""} does not support dynamic property in object.`,
89+
);
90+
91+
// ChatGPT strict mode even does not support the optional property
92+
if (
93+
props.model === "chatgpt" &&
94+
(props.config as Partial<IChatGptSchema.IConfig> | undefined)
95+
?.strict === true &&
96+
metadata.objects.some((o) =>
97+
o.type.properties.some((p) => p.value.isRequired() === false),
98+
)
99+
)
100+
output.push(
101+
`LLM schema of "chatgpt" (strict mode) does not support optional property in object.`,
81102
);
103+
104+
// Gemini does not support the union type
105+
if (props.model === "gemini" && size(metadata) > 1)
106+
output.push("Gemini model does not support the union type.");
107+
108+
// just JSON rule
109+
if (
110+
metadata.atomics.some((a) => a.type === "bigint") ||
111+
metadata.constants.some((c) => c.type === "bigint")
112+
)
113+
output.push("LLM schema does not support bigint type.");
82114
if (
83115
metadata.tuples.some((t) =>
84116
t.type.elements.some((e) => e.isRequired() === false),
@@ -98,8 +130,6 @@ export namespace LlmSchemaProgrammer {
98130
native.name !== "File"
99131
)
100132
output.push(`LLM schema does not support ${native.name} type.`);
101-
if (model === "gemini" && size(metadata) > 1)
102-
output.push("Gemini model does not support the union type.");
103133
return output;
104134
};
105135
}

src/transformers/features/llm/LlmApplicationOfValidateTransformer.ts

+12-7
Original file line numberDiff line numberDiff line change
@@ -35,6 +35,13 @@ export namespace LlmApplicationOfValidateTransformer {
3535
method: "application",
3636
node: props.expression.typeArguments[1],
3737
});
38+
const config: Partial<ILlmSchema.IConfig> = LlmModelPredicator.getConfig({
39+
context: props.context,
40+
method: "application",
41+
model,
42+
node: props.expression.typeArguments[2],
43+
}) as Partial<ILlmSchema.IConfig>;
44+
3845
const type: ts.Type = props.context.checker.getTypeFromTypeNode(top);
3946
const collection: MetadataCollection = new MetadataCollection({
4047
replace: MetadataCollection.replace,
@@ -48,7 +55,10 @@ export namespace LlmApplicationOfValidateTransformer {
4855
constant: true,
4956
absorb: false,
5057
functional: true,
51-
validate: LlmApplicationOfValidateProgrammer.validate(model),
58+
validate: LlmApplicationOfValidateProgrammer.validate({
59+
model,
60+
config,
61+
}),
5262
},
5363
collection,
5464
type,
@@ -66,12 +76,7 @@ export namespace LlmApplicationOfValidateTransformer {
6676
context: props.context,
6777
modulo: props.modulo,
6878
metadata: result.data,
69-
config: LlmModelPredicator.getConfig({
70-
context: props.context,
71-
method: "application",
72-
model,
73-
node: props.expression.typeArguments[2],
74-
}),
79+
config,
7580
});
7681
const literal: ts.Expression = ts.factory.createAsExpression(
7782
LiteralFactory.write(schema),

src/transformers/features/llm/LlmApplicationTransformer.ts

+13-8
Original file line numberDiff line numberDiff line change
@@ -32,9 +32,16 @@ export namespace LlmApplicationTransformer {
3232
// GET TYPE
3333
const model: ILlmSchema.Model = LlmModelPredicator.getModel({
3434
checker: props.context.checker,
35-
method: "applicationOfValidate",
35+
method: "application",
3636
node: props.expression.typeArguments[1],
3737
});
38+
const config: Partial<ILlmSchema.IConfig> = LlmModelPredicator.getConfig({
39+
context: props.context,
40+
method: "application",
41+
model,
42+
node: props.expression.typeArguments[2],
43+
}) as Partial<ILlmSchema.IConfig>;
44+
3845
const type: ts.Type = props.context.checker.getTypeFromTypeNode(top);
3946
const collection: MetadataCollection = new MetadataCollection({
4047
replace: MetadataCollection.replace,
@@ -48,7 +55,10 @@ export namespace LlmApplicationTransformer {
4855
constant: true,
4956
absorb: false,
5057
functional: true,
51-
validate: LlmApplicationProgrammer.validate(model),
58+
validate: LlmApplicationProgrammer.validate({
59+
model,
60+
config,
61+
}),
5262
},
5363
collection,
5464
type,
@@ -64,12 +74,7 @@ export namespace LlmApplicationTransformer {
6474
LlmApplicationProgrammer.write({
6575
model,
6676
metadata: result.data,
67-
config: LlmModelPredicator.getConfig({
68-
context: props.context,
69-
method: "application",
70-
model,
71-
node: props.expression.typeArguments[2],
72-
}),
77+
config,
7378
});
7479
const literal: ts.Expression = ts.factory.createAsExpression(
7580
LiteralFactory.write(schema),

src/transformers/features/llm/LlmParametersTransformer.ts

+12-7
Original file line numberDiff line numberDiff line change
@@ -36,6 +36,13 @@ export namespace LlmParametersTransformer {
3636
method: "parameters",
3737
node: props.expression.typeArguments[1],
3838
});
39+
const config: Partial<ILlmSchema.IConfig> = LlmModelPredicator.getConfig({
40+
context: props.context,
41+
method: "parameters",
42+
model,
43+
node: props.expression.typeArguments[2],
44+
}) as Partial<ILlmSchema.IConfig>;
45+
3946
const type: ts.Type = props.context.checker.getTypeFromTypeNode(top);
4047
const collection: MetadataCollection = new MetadataCollection({
4148
replace: MetadataCollection.replace,
@@ -48,7 +55,10 @@ export namespace LlmParametersTransformer {
4855
escape: true,
4956
constant: true,
5057
absorb: false,
51-
validate: LlmParametersProgrammer.validate(model),
58+
validate: LlmParametersProgrammer.validate({
59+
model,
60+
config,
61+
}),
5262
},
5363
collection,
5464
type,
@@ -63,12 +73,7 @@ export namespace LlmParametersTransformer {
6373
const out: ILlmFunction<any>["parameters"] = LlmParametersProgrammer.write({
6474
model,
6575
metadata: result.data,
66-
config: LlmModelPredicator.getConfig({
67-
context: props.context,
68-
method: "parameters",
69-
model,
70-
node: props.expression.typeArguments[2],
71-
}),
76+
config,
7277
});
7378
return ts.factory.createAsExpression(
7479
LiteralFactory.write(out),

src/transformers/features/llm/LlmSchemaTransformer.ts

+12-7
Original file line numberDiff line numberDiff line change
@@ -36,6 +36,13 @@ export namespace LlmSchemaTransformer {
3636
method: "schema",
3737
node: props.expression.typeArguments[1],
3838
});
39+
const config: Partial<ILlmSchema.IConfig> = LlmModelPredicator.getConfig({
40+
context: props.context,
41+
method: "schema",
42+
model,
43+
node: props.expression.typeArguments[2],
44+
}) as Partial<ILlmSchema.IConfig>;
45+
3946
const type: ts.Type = props.context.checker.getTypeFromTypeNode(top);
4047
const collection: MetadataCollection = new MetadataCollection({
4148
replace: MetadataCollection.replace,
@@ -48,7 +55,10 @@ export namespace LlmSchemaTransformer {
4855
escape: true,
4956
constant: true,
5057
absorb: false,
51-
validate: LlmSchemaProgrammer.validate(model),
58+
validate: LlmSchemaProgrammer.validate({
59+
model,
60+
config,
61+
}),
5262
},
5363
collection,
5464
type,
@@ -63,12 +73,7 @@ export namespace LlmSchemaTransformer {
6373
const out: LlmSchemaProgrammer.IOutput<any> = LlmSchemaProgrammer.write({
6474
model,
6575
metadata: result.data,
66-
config: LlmModelPredicator.getConfig({
67-
context: props.context,
68-
method: "schema",
69-
model,
70-
node: props.expression.typeArguments[2],
71-
}),
76+
config,
7277
});
7378
const schemaTypeNode = props.context.importer.type({
7479
file: "@samchon/openapi",

test-error/src/llm/llm.chatgpt.additionalProperties.ts

+12-3
Original file line numberDiff line numberDiff line change
@@ -4,15 +4,21 @@ typia.llm.schema<
44
{
55
dictionary: Record<string, string>;
66
},
7-
"chatgpt"
7+
"chatgpt",
8+
{
9+
strict: true;
10+
}
811
>({});
912
typia.llm.parameters<
1013
{
1114
input: {
1215
dictionary: Record<string, string>;
1316
};
1417
},
15-
"chatgpt"
18+
"chatgpt",
19+
{
20+
strict: true;
21+
}
1622
>();
1723
typia.llm.application<
1824
{
@@ -22,5 +28,8 @@ typia.llm.application<
2228
};
2329
}): void;
2430
},
25-
"chatgpt"
31+
"chatgpt",
32+
{
33+
strict: true;
34+
}
2635
>();

0 commit comments

Comments
 (0)