Merge pull request samchon#1434 from samchon/feat/chatgpt
Adapt samchon/openapi#112: ChatGPT strict mode configurable.
samchon authored Dec 14, 2024
2 parents 0e7b90a + 829b866 commit 51d7531
Showing 843 changed files with 8,785 additions and 13,812 deletions.
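
In user-facing terms, the @samchon/openapi 2.2.0 update lets the per-model configuration ride on a third type argument of the typia.llm.* functions; for the "chatgpt" model it carries the strict flag that was previously always enforced. A minimal sketch of that call shape (the service interface is made up; the type-argument layout follows the test file at the end of this diff):

import typia from "typia";

interface BbsArticleService {
  create(props: { input: { title: string; body: string } }): void;
}

// Strict mode is now opt-in through the configuration type argument.
const strict = typia.llm.application<BbsArticleService, "chatgpt", { strict: true }>();

// Omitting the configuration keeps the default, non-strict ChatGPT schema.
const lenient = typia.llm.application<BbsArticleService, "chatgpt">();
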
6 changes: 3 additions & 3 deletions package.json
@@ -1,6 +1,6 @@
{
"name": "typia",
"version": "7.3.0",
"version": "7.4.0",
"description": "Superfast runtime validators with only one line",
"main": "lib/index.js",
"typings": "lib/index.d.ts",
@@ -41,7 +41,7 @@
},
"homepage": "https://typia.io",
"dependencies": {
"@samchon/openapi": "^2.1.2",
"@samchon/openapi": "^2.2.0",
"commander": "^10.0.0",
"comment-json": "^4.2.3",
"inquirer": "^8.2.5",
@@ -50,7 +50,7 @@
},
"peerDependencies": {
"typescript": ">=4.8.0 <5.8.0",
"@samchon/openapi": ">=2.1.2 <3.0.0"
"@samchon/openapi": ">=2.2.0 <3.0.0"
},
"devDependencies": {
"@rollup/plugin-commonjs": "^26.0.1",
6 changes: 3 additions & 3 deletions packages/typescript-json/package.json
@@ -1,6 +1,6 @@
{
"name": "typescript-json",
"version": "7.3.0-dev.20241213",
"version": "7.4.0-dev.20241215",
"description": "Superfast runtime validators with only one line",
"main": "lib/index.js",
"typings": "lib/index.d.ts",
@@ -37,11 +37,11 @@
},
"homepage": "https://typia.io",
"dependencies": {
"typia": "7.3.0-dev.20241213"
"typia": "7.4.0-dev.20241215"
},
"peerDependencies": {
"typescript": ">=4.8.0 <5.8.0",
"@samchon/openapi": ">=2.1.2 <3.0.0"
"@samchon/openapi": ">=2.2.0 <3.0.0"
},
"stackblitz": {
"startCommand": "npm install && npm run test"
6 changes: 4 additions & 2 deletions src/programmers/llm/LlmApplicationOfValidateProgrammer.ts
@@ -12,8 +12,10 @@ import { ValidateProgrammer } from "../ValidateProgrammer";
import { LlmApplicationProgrammer } from "./LlmApplicationProgrammer";

export namespace LlmApplicationOfValidateProgrammer {
export const validate = (model: ILlmSchema.Model) =>
LlmApplicationProgrammer.validate(model);
export const validate = <Model extends ILlmSchema.Model>(props: {
model: Model;
config?: Partial<ILlmSchema.ModelConfig[Model]>;
}) => LlmApplicationProgrammer.validate(props);

export const write = <Model extends ILlmSchema.Model>(props: {
context: ITypiaContext;
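
The same signature migration repeats across the programmers in this commit: validate() used to take the bare model string and now takes a props object carrying the model plus an optional per-model config, so the ChatGPT strict flag can reach the schema checks. A sketch of the call-shape change as it might look from a sibling module (the relative import and the concrete config value are assumptions, not part of the diff):

import { LlmApplicationOfValidateProgrammer } from "./LlmApplicationOfValidateProgrammer";

// before (7.3.x): LlmApplicationOfValidateProgrammer.validate("chatgpt");
// after (7.4.0): the model travels together with its optional configuration.
const checker = LlmApplicationOfValidateProgrammer.validate({
  model: "chatgpt",
  config: { strict: true },
});
// `checker` is the curried (metadata, explore) => string[] validator that the
// transformers further below hand to the metadata analysis.
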
9 changes: 6 additions & 3 deletions src/programmers/llm/LlmApplicationProgrammer.ts
@@ -19,7 +19,10 @@ import { JsonApplicationProgrammer } from "../json/JsonApplicationProgrammer";
import { LlmSchemaProgrammer } from "./LlmSchemaProgrammer";

export namespace LlmApplicationProgrammer {
export const validate = (model: ILlmSchema.Model) => {
export const validate = <Model extends ILlmSchema.Model>(props: {
model: Model;
config?: Partial<ILlmSchema.ModelConfig[Model]>;
}) => {
let top: Metadata | undefined;
return (
metadata: Metadata,
@@ -36,7 +39,7 @@ export namespace LlmApplicationProgrammer {
metadata.functions.length === 1
)
return validateFunction(explore.property, metadata.functions[0]!);
else return LlmSchemaProgrammer.validate(model)(metadata);
else return LlmSchemaProgrammer.validate(props)(metadata);

const output: string[] = [];
const valid: boolean =
@@ -121,7 +124,7 @@
metadata: Metadata;
config?: Partial<ILlmSchema.ModelConfig[Model]>;
}): ILlmApplication<Model> => {
const errors: string[] = validate(props.model)(props.metadata, {
const errors: string[] = validate(props)(props.metadata, {
top: true,
object: null,
property: null,
7 changes: 5 additions & 2 deletions src/programmers/llm/LlmParametersProgrammer.ts
@@ -63,7 +63,10 @@ export namespace LlmParametersProgrammer {
};

export const validate =
(model: ILlmSchema.Model) =>
<Model extends ILlmSchema.Model>(props: {
model: Model;
config?: Partial<ILlmSchema.ModelConfig[Model]>;
}) =>
(metadata: Metadata, explore: MetadataFactory.IExplore): string[] => {
const output: string[] = [];
if (explore.top === true) {
@@ -84,7 +87,7 @@
output.push("LLM parameters must be a non-undefined object type.");
}
}
output.push(...LlmSchemaProgrammer.validate(model)(metadata));
output.push(...LlmSchemaProgrammer.validate(props)(metadata));
return output;
};
}
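
LlmParametersProgrammer keeps its own top-level rule, namely that the parameters type must itself be a plain, required object, and now simply forwards the model/config pair down to LlmSchemaProgrammer. A sketch of what that means at the typia.llm.parameters call site (property names are illustrative):

import typia from "typia";

// Accepted: the top-level type is a non-undefined object; its properties are
// then checked by the shared LlmSchemaProgrammer rules.
const parameters = typia.llm.parameters<
  {
    input: {
      title: string;
      count: number;
    };
  },
  "chatgpt",
  { strict: true }
>();

// A top-level type that can be undefined (or is not an object at all) would
// presumably be reported with "LLM parameters must be a non-undefined object
// type.", the message pushed by the validator above.
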
52 changes: 41 additions & 11 deletions src/programmers/llm/LlmSchemaProgrammer.ts
@@ -1,4 +1,9 @@
import { ILlmSchema, IOpenApiSchemaError, IResult } from "@samchon/openapi";
import {
IChatGptSchema,
ILlmSchema,
IOpenApiSchemaError,
IResult,
} from "@samchon/openapi";
import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer";

import { IJsonSchemaCollection } from "../../schemas/json/IJsonSchemaCollection";
@@ -60,25 +65,52 @@ export namespace LlmSchemaProgrammer {
};

export const validate =
(model: ILlmSchema.Model) =>
<Model extends ILlmSchema.Model>(props: {
model: ILlmSchema.Model;
config?: Partial<ILlmSchema.ModelConfig[Model]>;
}) =>
(metadata: Metadata): string[] => {
const output: string[] = [];

// no additionalProperties in ChatGPT strict mode or Gemini
if (
metadata.atomics.some((a) => a.type === "bigint") ||
metadata.constants.some((c) => c.type === "bigint")
)
output.push("LLM schema does not support bigint type.");
if (
(model === "chatgpt" || model === "gemini") &&
((props.model === "chatgpt" &&
(props.config as Partial<IChatGptSchema.IConfig> | undefined)
?.strict === true) ||
props.model === "gemini") &&
metadata.objects.some((o) =>
o.type.properties.some(
(p) => p.key.isSoleLiteral() === false && p.value.size() !== 0,
),
)
)
output.push(
`LLM schema of "${model}" does not support dynamic property in object.`,
`LLM schema of "${props.model}"${props.model === "chatgpt" ? " (strict mode)" : ""} does not support dynamic property in object.`,
);

// ChatGPT strict mode even does not support the optional property
if (
props.model === "chatgpt" &&
(props.config as Partial<IChatGptSchema.IConfig> | undefined)
?.strict === true &&
metadata.objects.some((o) =>
o.type.properties.some((p) => p.value.isRequired() === false),
)
)
output.push(
`LLM schema of "chatgpt" (strict mode) does not support optional property in object.`,
);

// Gemini does not support the union type
if (props.model === "gemini" && size(metadata) > 1)
output.push("Gemini model does not support the union type.");

// just JSON rule
if (
metadata.atomics.some((a) => a.type === "bigint") ||
metadata.constants.some((c) => c.type === "bigint")
)
output.push("LLM schema does not support bigint type.");
if (
metadata.tuples.some((t) =>
t.type.elements.some((e) => e.isRequired() === false),
@@ -98,8 +130,6 @@
native.name !== "File"
)
output.push(`LLM schema does not support ${native.name} type.`);
if (model === "gemini" && size(metadata) > 1)
output.push("Gemini model does not support the union type.");
return output;
};
}
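
Putting the reordered checks together: for the "chatgpt" model, dynamic and optional properties are now rejected only when strict is enabled, Gemini keeps its unconditional restrictions, and the plain JSON rules apply to every model. A sketch of types that would trip each branch (type names are illustrative; the quoted messages are the ones pushed above):

// Rejected for "chatgpt" only under { strict: true }:
interface DynamicBag {
  dictionary: Record<string, string>; // dynamic property
  // -> LLM schema of "chatgpt" (strict mode) does not support dynamic property in object.
}
interface OptionalBag {
  nickname?: string; // optional property
  // -> LLM schema of "chatgpt" (strict mode) does not support optional property in object.
}

// Rejected for "gemini" regardless of configuration:
type TitleOrCount = string | number;
// -> Gemini model does not support the union type.

// Rejected for every model:
interface WithBigint {
  id: bigint;
  // -> LLM schema does not support bigint type.
}
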
src/transformers/features/llm/LlmApplicationOfValidateTransformer.ts
@@ -35,6 +35,13 @@ export namespace LlmApplicationOfValidateTransformer {
method: "application",
node: props.expression.typeArguments[1],
});
const config: Partial<ILlmSchema.IConfig> = LlmModelPredicator.getConfig({
context: props.context,
method: "application",
model,
node: props.expression.typeArguments[2],
}) as Partial<ILlmSchema.IConfig>;

const type: ts.Type = props.context.checker.getTypeFromTypeNode(top);
const collection: MetadataCollection = new MetadataCollection({
replace: MetadataCollection.replace,
@@ -48,7 +55,10 @@
constant: true,
absorb: false,
functional: true,
validate: LlmApplicationOfValidateProgrammer.validate(model),
validate: LlmApplicationOfValidateProgrammer.validate({
model,
config,
}),
},
collection,
type,
@@ -66,12 +76,7 @@
context: props.context,
modulo: props.modulo,
metadata: result.data,
config: LlmModelPredicator.getConfig({
context: props.context,
method: "application",
model,
node: props.expression.typeArguments[2],
}),
config,
});
const literal: ts.Expression = ts.factory.createAsExpression(
LiteralFactory.write(schema),
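
The transformer now resolves the configuration from the third type argument once, through LlmModelPredicator.getConfig, and hands the same object to both the metadata validator and the writer. At the call site this presumably corresponds to typia.llm.applicationOfValidate, matching the transformer's name and the "applicationOfValidate" method string in the next file; the interface below is made up:

import typia from "typia";

// Illustrative application interface; its methods become the LLM functions.
interface ShoppingCounselor {
  recommend(props: { input: { budget: number; category: string } }): void;
}

// The third type argument is the new per-model configuration.
const application = typia.llm.applicationOfValidate<
  ShoppingCounselor,
  "chatgpt",
  { strict: true }
>();
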
21 changes: 13 additions & 8 deletions src/transformers/features/llm/LlmApplicationTransformer.ts
@@ -32,9 +32,16 @@ export namespace LlmApplicationTransformer {
// GET TYPE
const model: ILlmSchema.Model = LlmModelPredicator.getModel({
checker: props.context.checker,
method: "applicationOfValidate",
method: "application",
node: props.expression.typeArguments[1],
});
const config: Partial<ILlmSchema.IConfig> = LlmModelPredicator.getConfig({
context: props.context,
method: "application",
model,
node: props.expression.typeArguments[2],
}) as Partial<ILlmSchema.IConfig>;

const type: ts.Type = props.context.checker.getTypeFromTypeNode(top);
const collection: MetadataCollection = new MetadataCollection({
replace: MetadataCollection.replace,
@@ -48,7 +55,10 @@
constant: true,
absorb: false,
functional: true,
validate: LlmApplicationProgrammer.validate(model),
validate: LlmApplicationProgrammer.validate({
model,
config,
}),
},
collection,
type,
@@ -64,12 +74,7 @@
LlmApplicationProgrammer.write({
model,
metadata: result.data,
config: LlmModelPredicator.getConfig({
context: props.context,
method: "application",
model,
node: props.expression.typeArguments[2],
}),
config,
});
const literal: ts.Expression = ts.factory.createAsExpression(
LiteralFactory.write(schema),
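
Besides threading the config through, this file also corrects the method label passed to LlmModelPredicator.getModel from "applicationOfValidate" to "application". For consumers, the generated value is an ILlmApplication of the chosen model; a sketch of reading it back (the functions/name/parameters members follow ILlmApplication and ILlmFunction from @samchon/openapi and are an assumption here, since the diff does not show them):

import { ILlmApplication } from "@samchon/openapi";
import typia from "typia";

interface CalculatorService {
  plus(props: { input: { x: number; y: number } }): number;
}

const app: ILlmApplication<"chatgpt"> = typia.llm.application<
  CalculatorService,
  "chatgpt",
  { strict: true }
>();
for (const func of app.functions)
  console.log(func.name, func.parameters); // parameters typed per the "chatgpt" schema model
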
19 changes: 12 additions & 7 deletions src/transformers/features/llm/LlmParametersTransformer.ts
@@ -36,6 +36,13 @@ export namespace LlmParametersTransformer {
method: "parameters",
node: props.expression.typeArguments[1],
});
const config: Partial<ILlmSchema.IConfig> = LlmModelPredicator.getConfig({
context: props.context,
method: "parameters",
model,
node: props.expression.typeArguments[2],
}) as Partial<ILlmSchema.IConfig>;

const type: ts.Type = props.context.checker.getTypeFromTypeNode(top);
const collection: MetadataCollection = new MetadataCollection({
replace: MetadataCollection.replace,
@@ -48,7 +55,10 @@
escape: true,
constant: true,
absorb: false,
validate: LlmParametersProgrammer.validate(model),
validate: LlmParametersProgrammer.validate({
model,
config,
}),
},
collection,
type,
@@ -63,12 +73,7 @@
const out: ILlmFunction<any>["parameters"] = LlmParametersProgrammer.write({
model,
metadata: result.data,
config: LlmModelPredicator.getConfig({
context: props.context,
method: "parameters",
model,
node: props.expression.typeArguments[2],
}),
config,
});
return ts.factory.createAsExpression(
LiteralFactory.write(out),
19 changes: 12 additions & 7 deletions src/transformers/features/llm/LlmSchemaTransformer.ts
@@ -36,6 +36,13 @@ export namespace LlmSchemaTransformer {
method: "schema",
node: props.expression.typeArguments[1],
});
const config: Partial<ILlmSchema.IConfig> = LlmModelPredicator.getConfig({
context: props.context,
method: "schema",
model,
node: props.expression.typeArguments[2],
}) as Partial<ILlmSchema.IConfig>;

const type: ts.Type = props.context.checker.getTypeFromTypeNode(top);
const collection: MetadataCollection = new MetadataCollection({
replace: MetadataCollection.replace,
@@ -48,7 +55,10 @@
escape: true,
constant: true,
absorb: false,
validate: LlmSchemaProgrammer.validate(model),
validate: LlmSchemaProgrammer.validate({
model,
config,
}),
},
collection,
type,
@@ -63,12 +73,7 @@
const out: LlmSchemaProgrammer.IOutput<any> = LlmSchemaProgrammer.write({
model,
metadata: result.data,
config: LlmModelPredicator.getConfig({
context: props.context,
method: "schema",
model,
node: props.expression.typeArguments[2],
}),
config,
});
const schemaTypeNode = props.context.importer.type({
file: "@samchon/openapi",
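
LlmSchemaTransformer follows the same pattern and casts the emitted literal to the schema type imported from "@samchon/openapi" (the schemaTypeNode above). For the "chatgpt" model that type is presumably IChatGptSchema; a sketch under that assumption:

import { IChatGptSchema } from "@samchon/openapi";
import typia from "typia";

// Assumption: ILlmSchema.ModelSchema["chatgpt"] is IChatGptSchema, so the
// generated literal can be annotated with it directly.
const schema: IChatGptSchema = typia.llm.schema<
  {
    title: string;
    body: string;
  },
  "chatgpt",
  { strict: true }
>({});
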
15 changes: 12 additions & 3 deletions test-error/src/llm/llm.chatgpt.additionalProperties.ts
@@ -4,15 +4,21 @@ typia.llm.schema<
{
dictionary: Record<string, string>;
},
"chatgpt"
"chatgpt",
{
strict: true;
}
>({});
typia.llm.parameters<
{
input: {
dictionary: Record<string, string>;
};
},
"chatgpt"
"chatgpt",
{
strict: true;
}
>();
typia.llm.application<
{
@@ -22,5 +28,8 @@
};
}): void;
},
"chatgpt"
"chatgpt",
{
strict: true;
}
>();
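
For contrast, the same shapes without the strict configuration are no longer expected to raise compile errors, since the dynamic-property check is now gated on strict being true. A sketch that is not part of the committed tests (the import specifier and the method name are illustrative):

import typia from "typia";

typia.llm.schema<
  {
    dictionary: Record<string, string>;
  },
  "chatgpt"
>({});
typia.llm.parameters<
  {
    input: {
      dictionary: Record<string, string>;
    };
  },
  "chatgpt"
>();
typia.llm.application<
  {
    insert(props: {
      input: {
        dictionary: Record<string, string>;
      };
    }): void;
  },
  "chatgpt"
>();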