From 150ce80e7da67e018d516535148da66e08df48a6 Mon Sep 17 00:00:00 2001 From: Jeongho Nam Date: Sun, 5 Jan 2025 21:00:45 +0900 Subject: [PATCH] Fix `IHttpLlmFunction.separated` composing bug. --- package.json | 2 +- src/composers/HttpLlmApplicationComposer.ts | 2 +- src/composers/LlmSchemaComposer.ts | 19 +++++ src/composers/llm/ChatGptSchemaComposer.ts | 5 ++ src/composers/llm/ClaudeSchemaComposer.ts | 5 ++ src/composers/llm/GeminiSchemaComposer.ts | 5 ++ src/composers/llm/LlamaSchemaComposer.ts | 5 ++ src/composers/llm/LlmSchemaV3Composer.ts | 5 ++ src/composers/llm/LlmSchemaV3_1Composer.ts | 5 ++ test/examples/chatgpt-structured-output.ts | 72 +++++++++++++++++++ .../llm/validate_llm_application_separate.ts | 72 +++++++++++++++++++ 11 files changed, 195 insertions(+), 2 deletions(-) create mode 100644 test/examples/chatgpt-structured-output.ts create mode 100644 test/features/llm/validate_llm_application_separate.ts diff --git a/package.json b/package.json index b0dd5c6..a34e769 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@samchon/openapi", - "version": "2.3.1", + "version": "2.3.2", "description": "OpenAPI definitions and converters for 'typia' and 'nestia'.", "main": "./lib/index.js", "module": "./lib/index.mjs", diff --git a/src/composers/HttpLlmApplicationComposer.ts b/src/composers/HttpLlmApplicationComposer.ts index b73ec36..7de045d 100644 --- a/src/composers/HttpLlmApplicationComposer.ts +++ b/src/composers/HttpLlmApplicationComposer.ts @@ -196,7 +196,7 @@ export namespace HttpLlmComposer { additionalProperties: false, required: properties.map(([k]) => k), } as any as ILlmSchema.ModelParameters[Model]; - if (Object.keys($defs).length) + if (LlmSchemaComposer.isDefs(props.model)) (parameters as any as IChatGptSchema.IParameters).$defs = $defs; const operation: OpenApi.IOperation = props.route.operation(); diff --git a/src/composers/LlmSchemaComposer.ts b/src/composers/LlmSchemaComposer.ts index 43a838c..f5dab61 100644 --- 
a/src/composers/LlmSchemaComposer.ts +++ b/src/composers/LlmSchemaComposer.ts @@ -34,6 +34,13 @@ export namespace LlmSchemaComposer { export const separateParameters = ( model: Model, ) => SEPARATE_PARAMETERS[model]; + + /** + * @internal + */ + export const isDefs = ( + model: Model, + ): boolean => IS_DEFS[model](); } const PARAMETERS_CASTERS = { @@ -95,3 +102,15 @@ const TYPE_CHECKERS = { "3.0": LlmTypeCheckerV3, "3.1": LlmTypeCheckerV3_1, }; + +/** + * @internal + */ +const IS_DEFS = { + chatgpt: () => ChatGptSchemaComposer.IS_DEFS, + claude: () => ClaudeSchemaComposer.IS_DEFS, + gemini: () => GeminiSchemaComposer.IS_DEFS, + llama: () => LlamaSchemaComposer.IS_DEFS, + "3.0": () => LlmSchemaV3Composer.IS_DEFS, + "3.1": () => LlmSchemaV3_1Composer.IS_DEFS, +}; diff --git a/src/composers/llm/ChatGptSchemaComposer.ts b/src/composers/llm/ChatGptSchemaComposer.ts index eeedf71..13065db 100644 --- a/src/composers/llm/ChatGptSchemaComposer.ts +++ b/src/composers/llm/ChatGptSchemaComposer.ts @@ -10,6 +10,11 @@ import { OpenApiTypeChecker } from "../../utils/OpenApiTypeChecker"; import { LlmSchemaV3_1Composer } from "./LlmSchemaV3_1Composer"; export namespace ChatGptSchemaComposer { + /** + * @internal + */ + export const IS_DEFS = true; + export const parameters = (props: { config: IChatGptSchema.IConfig; components: OpenApi.IComponents; diff --git a/src/composers/llm/ClaudeSchemaComposer.ts b/src/composers/llm/ClaudeSchemaComposer.ts index b3d7300..4f16bd1 100644 --- a/src/composers/llm/ClaudeSchemaComposer.ts +++ b/src/composers/llm/ClaudeSchemaComposer.ts @@ -6,6 +6,11 @@ import { IResult } from "../../typings/IResult"; import { LlmSchemaV3_1Composer } from "./LlmSchemaV3_1Composer"; export namespace ClaudeSchemaComposer { + /** + * @internal + */ + export const IS_DEFS = true; + export const parameters = (props: { config: IClaudeSchema.IConfig; components: OpenApi.IComponents; diff --git a/src/composers/llm/GeminiSchemaComposer.ts 
b/src/composers/llm/GeminiSchemaComposer.ts index 701f74d..48e78ae 100644 --- a/src/composers/llm/GeminiSchemaComposer.ts +++ b/src/composers/llm/GeminiSchemaComposer.ts @@ -12,6 +12,11 @@ import { LlmParametersFinder } from "./LlmParametersComposer"; import { LlmSchemaV3Composer } from "./LlmSchemaV3Composer"; export namespace GeminiSchemaComposer { + /** + * @internal + */ + export const IS_DEFS = false; + export const parameters = (props: { config: IGeminiSchema.IConfig; components: OpenApi.IComponents; diff --git a/src/composers/llm/LlamaSchemaComposer.ts b/src/composers/llm/LlamaSchemaComposer.ts index 95eef74..ffcda99 100644 --- a/src/composers/llm/LlamaSchemaComposer.ts +++ b/src/composers/llm/LlamaSchemaComposer.ts @@ -6,6 +6,11 @@ import { IResult } from "../../typings/IResult"; import { LlmSchemaV3_1Composer } from "./LlmSchemaV3_1Composer"; export namespace LlamaSchemaComposer { + /** + * @internal + */ + export const IS_DEFS = true; + export const parameters = (props: { config: ILlamaSchema.IConfig; components: OpenApi.IComponents; diff --git a/src/composers/llm/LlmSchemaV3Composer.ts b/src/composers/llm/LlmSchemaV3Composer.ts index 8f37434..2fa1815 100644 --- a/src/composers/llm/LlmSchemaV3Composer.ts +++ b/src/composers/llm/LlmSchemaV3Composer.ts @@ -10,6 +10,11 @@ import { OpenApiTypeChecker } from "../../utils/OpenApiTypeChecker"; import { LlmParametersFinder } from "./LlmParametersComposer"; export namespace LlmSchemaV3Composer { + /** + * @internal + */ + export const IS_DEFS = false; + export const parameters = (props: { config: ILlmSchemaV3.IConfig; components: OpenApi.IComponents; diff --git a/src/composers/llm/LlmSchemaV3_1Composer.ts b/src/composers/llm/LlmSchemaV3_1Composer.ts index 5e8b56a..a9c1605 100644 --- a/src/composers/llm/LlmSchemaV3_1Composer.ts +++ b/src/composers/llm/LlmSchemaV3_1Composer.ts @@ -10,6 +10,11 @@ import { JsonDescriptionUtil } from "../../utils/internal/JsonDescriptionUtil"; import { LlmParametersFinder } from 
"./LlmParametersComposer"; export namespace LlmSchemaV3_1Composer { + /** + * @internal + */ + export const IS_DEFS = true; + export const parameters = (props: { config: ILlmSchemaV3_1.IConfig; components: OpenApi.IComponents; diff --git a/test/examples/chatgpt-structured-output.ts b/test/examples/chatgpt-structured-output.ts new file mode 100644 index 0000000..b73d7f8 --- /dev/null +++ b/test/examples/chatgpt-structured-output.ts @@ -0,0 +1,72 @@ +import OpenAI from "openai"; +import typia, { IValidation, tags } from "typia"; + +interface IMember { + email: string & tags.Format<"email">; + name: string; + age: number; + hobbies: string[]; + joined_at: string & tags.Format<"date">; +} + +const step = async ( + failure?: IValidation.IFailure | undefined, +): Promise> => { + const client: OpenAI = new OpenAI({ + apiKey: "", + }); + const completion: OpenAI.ChatCompletion = + await client.chat.completions.create({ + model: "gpt-4o", + messages: [ + { + role: "user", + content: [ + "I am a new member of the community.", + "", + "My name is John Doe, and I am 25 years old.", + "I like playing basketball and reading books,", + "and joined to this community at 2022-01-01.", + ].join("\n"), + }, + ...(failure + ? [ + { + role: "system", + content: [ + "You A.I. 
agent had taken a mistak that", + "returing wrong typed structured data.", + "", + "Here is the detailed list of type errors.", + "Review and correct them at the next step.", + "", + "```json", + JSON.stringify(failure.errors, null, 2), + "```", + ].join("\n"), + } satisfies OpenAI.ChatCompletionSystemMessageParam, + ] + : []), + ], + response_format: { + type: "json_schema", + json_schema: { + name: "member", + schema: typia.llm.parameters() as any, + }, + }, + }); + const member: IMember = JSON.parse(completion.choices[0].message.content!); + return typia.validate(member); +}; + +const main = async (): Promise => { + let result: IValidation | undefined = undefined; + for (let i: number = 0; i < 3; ++i) { + if (result && result.success === true) break; + result = await step(result); + } + console.log(result); +}; + +main().catch(console.error); diff --git a/test/features/llm/validate_llm_application_separate.ts b/test/features/llm/validate_llm_application_separate.ts new file mode 100644 index 0000000..d4fe887 --- /dev/null +++ b/test/features/llm/validate_llm_application_separate.ts @@ -0,0 +1,72 @@ +import { TestValidator } from "@nestia/e2e"; +import { + HttpLlm, + IHttpLlmApplication, + ILlmSchema, + OpenApi, + OpenApiV3, + OpenApiV3_1, + SwaggerV2, +} from "@samchon/openapi"; +import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; +import { Singleton } from "tstl"; +import typia from "typia"; + +export const test_chatgpt_application_separate = async (): Promise => { + await validate_llm_application_separate("chatgpt", false); + await validate_llm_application_separate("chatgpt", true); +}; + +export const test_claude_application_separate = async (): Promise => { + await validate_llm_application_separate("claude", false); + await validate_llm_application_separate("claude", true); +}; + +export const test_gemini_application_separate = async (): Promise => { + await validate_llm_application_separate("gemini", false); +}; + +export 
const test_llama_application_separate = async (): Promise => { + await validate_llm_application_separate("llama", false); + await validate_llm_application_separate("llama", true); +}; + +export const test_llm_v30_application_separate = async (): Promise => { + await validate_llm_application_separate("3.0", false); + await validate_llm_application_separate("3.0", true); +}; + +export const test_llm_v31_application_separate = async (): Promise => { + await validate_llm_application_separate("3.1", false); + await validate_llm_application_separate("3.1", true); +}; + +const validate_llm_application_separate = async < + Model extends ILlmSchema.Model, +>( + model: Model, + constraint: boolean, +): Promise => { + const application: IHttpLlmApplication = HttpLlm.application({ + model, + document: await document.get(), + options: { + separate: (schema: any) => + LlmSchemaComposer.typeChecker(model).isString(schema as any) && + (schema as any)["x-wrtn-secret-key"] !== undefined, + constraint: constraint as any, + } as any, + }); + for (const func of application.functions) + TestValidator.equals("separated")(!!func.separated)(true); +}; + +const document = new Singleton(async (): Promise => { + const swagger: + | SwaggerV2.IDocument + | OpenApiV3.IDocument + | OpenApiV3_1.IDocument = await fetch( + "https://wrtnio.github.io/connectors/swagger/swagger.json", + ).then((r) => r.json()); + return OpenApi.convert(typia.assert(swagger)); +});