@@ -76,7 +76,7 @@ export interface ILlmFunction<Model extends ILlmSchema.Model> {
    *
    * You can fill this property by the `@tag ${name}` comment tag.
    */
-  tags?: string[];
+  tags?: string[] | undefined;
 
   /**
    * Validate function of the arguments.
@@ -93,10 +93,18 @@ export interface ILlmFunction<Model extends ILlmSchema.Model> {
    * And in my experience, OpenAI's `gpt-4o-mini` model tends to construct
    * an invalid function calling arguments at the first trial about 50% of
    * the time. However, if correct it through this `validate` function,
-   * the success rate soars to 99% at the second trial, and I've never failed
-   * at the third trial.
+   * the success rate soars to 99% at the second trial, and I've never
+   * failed at the third trial.
    *
-   * @param args Arguments to validate.
+   * > If you've {@link separated} parameters, use the
+   * > {@link ILlmFunction.ISeparated.validate} function instead when
+   * > validating the LLM composed arguments.
+   * >
+   * > In that case, this `validate` function would be meaningful only
+   * > when you've merged the LLM and human composed arguments by the
+   * > {@link HttpLlm.mergeParameters} function.
+   *
+   * @param args Arguments to validate
    * @returns Validation result
    */
   validate: (args: unknown) => IValidation<unknown>;
@@ -118,5 +126,32 @@ export namespace ILlmFunction {
      * Parameters that would be composed by the human.
      */
     human: ILlmSchema.ModelParameters[Model] | null;
+
+    /**
+     * Validate function of the separated arguments.
+     *
+     * If the LLM part of the separated parameters has some properties,
+     * this `validate` function will be filled for the {@link llm}
+     * type validation.
+     *
+     * > You know what? LLM (Large Language Model) like OpenAI takes a lot of
+     * > mistakes when composing arguments in function calling. Even though
+     * > `number` like simple type is defined in the {@link parameters} schema,
+     * > LLM often fills it just by a `string` typed value.
+     * >
+     * > In that case, you have to give a validation feedback to the LLM by
+     * > using this `validate` function. The `validate` function will return
+     * > detailed information about every type errors about the arguments.
+     * >
+     * > And in my experience, OpenAI's `gpt-4o-mini` model tends to construct
+     * > an invalid function calling arguments at the first trial about 50% of
+     * > the time. However, if correct it through this `validate` function,
+     * > the success rate soars to 99% at the second trial, and I've never
+     * > failed at the third trial.
+     *
+     * @param args Arguments to validate
+     * @returns Validation result
+     */
+    validate?: ((args: unknown) => IValidation<unknown>) | undefined;
   }
 }
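
As a rough sketch of the feedback loop these comments describe, a retry wrapper around `validate` might look like the example below. The `composeArguments` callback, the three-trial policy, and the `@samchon/openapi` import path are assumptions made for illustration; only `ILlmFunction.validate` and the `IValidation` result shape come from the interface documented above.

```typescript
import type { ILlmFunction, ILlmSchema, IValidation } from "@samchon/openapi";

// Minimal sketch of the validation-feedback loop described in the JSDoc above.
// `composeArguments` is a hypothetical callback that asks the LLM to build
// (or, given the previous type errors, rebuild) the function calling arguments.
async function composeWithFeedback<Model extends ILlmSchema.Model>(
  func: ILlmFunction<Model>,
  composeArguments: (previous?: IValidation<unknown>) => Promise<unknown>,
): Promise<unknown> {
  let previous: IValidation<unknown> | undefined = undefined;
  for (let trial: number = 0; trial < 3; ++trial) {
    const args: unknown = await composeArguments(previous); // LLM composes arguments
    const result: IValidation<unknown> = func.validate(args); // type-check them
    if (result.success === true) return args; // valid: ready to execute the function
    previous = result; // feed the detailed type errors back to the LLM for the next trial
  }
  throw new Error("LLM failed to compose valid arguments within three trials.");
}
```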