diff --git a/README.md b/README.md index 541f5a94..c85933b3 100644 --- a/README.md +++ b/README.md @@ -1,23 +1,15 @@ # `@samchon/openapi` ```mermaid -flowchart - subgraph "OpenAPI Specification" - v20("Swagger v2.0") --upgrades--> emended[["OpenAPI v3.1 (emended)"]] - v30("OpenAPI v3.0") --upgrades--> emended - v31("OpenAPI v3.1") --emends--> emended - end - subgraph "OpenAPI Generator" - emended --normalizes--> migration[["Migration Schema"]] - migration --"Artificial Intelligence"--> lfc{{"LLM Function Calling"}} - lfc --"OpenAI"--> chatgpt("ChatGPT") - lfc --"Google"--> gemini("Gemini") - lfc --"Anthropic"--> claude("Claude") - lfc --"Google" --> legacy_gemini(" (legacy) Gemini") - legacy_gemini --"3.0" --> custom(["Custom JSON Schema"]) - chatgpt --"3.1"--> custom - gemini --"3.1"--> standard(["Standard JSON Schema"]) - claude --"3.1"--> standard - end +flowchart TB +subgraph "OpenAPI Specification" + v20("Swagger v2.0") --upgrades--> emended[["OpenAPI v3.1 (emended)"]] + v30("OpenAPI v3.0") --upgrades--> emended + v31("OpenAPI v3.1") --emends--> emended +end +subgraph "LLM Function Calling" + emended --normalizes--> migration[["Migration Schema"]] + migration --"AI-Ready"--> schema{{"LLM Function Schema"}} +end ``` [![GitHub license](https://img.shields.io/badge/license-MIT-blue.svg)](https://github.com/samchon/openapi/blob/master/LICENSE) @@ -27,16 +19,18 @@ flowchart [![API Documents](https://img.shields.io/badge/API-Documents-forestgreen)](https://samchon.github.io/openapi/api/) [![Discord Badge](https://img.shields.io/badge/discord-samchon-d91965?style=flat&labelColor=5866f2&logo=discord&logoColor=white&link=https://discord.gg/E94XhzrUCZ)](https://discord.gg/E94XhzrUCZ) -Transform OpenAPI documents into type-safe LLM function calling applications. +**Transform OpenAPI documents into LLM function calling applications.** -`@samchon/openapi` converts any version of OpenAPI/Swagger documents into LLM function calling schemas for OpenAI GPT, Claude, and Gemini. 
It supports every OpenAPI version (Swagger 2.0, OpenAPI 3.0, and OpenAPI 3.1) with full TypeScript type definitions. The library also works with MCP (Model Context Protocol) servers, enabling seamless AI agent development. +`@samchon/openapi` converts OpenAPI/Swagger documents into LLM function calling schemas. With full TypeScript type safety, automatic validation, and support for every OpenAPI version, it's the simplest way to make your HTTP backend AI-callable. -**Key Features:** -- **Universal OpenAPI Support**: Works with Swagger 2.0, OpenAPI 3.0, and OpenAPI 3.1 -- **LLM Function Calling**: Auto-generates function schemas for OpenAI, Claude, and Gemini -- **Type-Safe Validation**: Built-in validation with detailed error feedback for LLM responses -- **MCP Integration**: Compose function calling schemas from MCP servers -- **Emended Specification**: Standardized OpenAPI v3.1 format that removes ambiguities +## Key Features + +- **🌐 Multi-Provider Support**: Works with OpenAI, Claude, Qwen, Llama, and other LLM providers +- **📝 Complete OpenAPI Coverage**: Swagger 2.0, OpenAPI 3.0, and OpenAPI 3.1 fully supported +- **🔒 Type-Safe Validation**: Built-in validation with detailed error feedback for LLM responses +- **🔄 MCP Integration**: Compose function calling schemas from Model Context Protocol servers +- **📊 Emended Specification**: Standardized OpenAPI v3.1 format that removes ambiguities +- **✅ Production Ready**: Battle-tested with 98%+ success rates in real-world LLM applications **Live Demo:** > https://github.com/user-attachments/assets/e1faf30b-c703-4451-b68b-2e7a8170bce5 @@ -55,50 +49,63 @@ Transform OpenAPI documents into type-safe LLM function calling applications. 
npm install @samchon/openapi ``` -Transform your OpenAPI document into an LLM function calling application in just a few lines: +Transform your OpenAPI document into an LLM function calling application: ```typescript import { HttpLlm, OpenApi } from "@samchon/openapi"; -// Load and convert your OpenAPI document +// 1. Load and convert your OpenAPI document const document: OpenApi.IDocument = OpenApi.convert(swagger); -// Generate LLM function calling schemas -const application: IHttpLlmApplication<"chatgpt"> = HttpLlm.application({ - model: "chatgpt", // "chatgpt" | "claude" | "gemini" +// 2. Generate LLM function calling schemas +const application: IHttpLlmApplication = HttpLlm.application({ document, }); -// Find a function by path and method -const func: IHttpLlmFunction<"chatgpt"> | undefined = application.functions.find( +// 3. Find a function to call +const func: IHttpLlmFunction | undefined = application.functions.find( (f) => f.path === "/bbs/articles" && f.method === "post" ); -// Execute the function with LLM-composed arguments -const result: unknown = await HttpLlm.execute({ +// 4. Use with any LLM provider (OpenAI, Claude, Qwen, etc.) +const completion = await llm.chat.completions.create({ + model: "gpt-4o", // or claude-3-5-sonnet, qwen-plus, etc. + messages: [...], + tools: [{ + type: "function", + function: { + name: func.name, + description: func.description, + parameters: func.parameters, + } + }], +}); + +// 5. Execute with validation +const result = await HttpLlm.execute({ connection: { host: "http://localhost:3000" }, application, function: func, - arguments: llmGeneratedArgs, // from OpenAI/Claude/Gemini + input: llmGeneratedArgs, }); ``` -That's it! Your HTTP backend is now callable by AI. +**That's it!** Your HTTP backend is now AI-callable across all major LLM providers. 
## OpenAPI Definitions -`@samchon/openapi` provides complete TypeScript definitions for all OpenAPI versions and introduces an "emended" OpenAPI v3.1 specification that serves as a universal intermediate format. +`@samchon/openapi` provides complete TypeScript definitions for all OpenAPI versions and introduces an "emended" OpenAPI v3.1 specification that serves as an intermediate format. ```mermaid -flowchart - v20(Swagger v2.0) --upgrades--> emended[["OpenAPI v3.1 (emended)"]] - v30(OpenAPI v3.0) --upgrades--> emended - v31(OpenAPI v3.1) --emends--> emended - emended --downgrades--> v20d(Swagger v2.0) - emended --downgrades--> v30d(Swagger v3.0) +flowchart TB +v20(Swagger v2.0) --upgrades--> emended[["OpenAPI v3.1 (emended)"]] +v30(OpenAPI v3.0) --upgrades--> emended +v31(OpenAPI v3.1) --emends--> emended +emended --downgrades--> v20d(Swagger v2.0) +emended --downgrades--> v30d(OpenAPI v3.0) ``` **Supported Specifications:** @@ -109,7 +116,7 @@ flowchart ### What is "Emended" OpenAPI? -The emended specification removes ambiguities and duplications from OpenAPI v3.1, creating a cleaner, more consistent format. All conversions flow through this intermediate format. +The emended specification removes ambiguities and duplications from OpenAPI v3.1, creating a cleaner, more consistent format. All conversions flow through this intermediate format. 
**Key Improvements:** - **Operations**: Merges parameters from path and operation levels, resolves all references @@ -131,7 +138,7 @@ const v20: SwaggerV2.IDocument = OpenApi.downgrade(emended, "2.0"); ### Validating OpenAPI Documents -Use `typia` for runtime validation with detailed type checking - far more accurate than other validators: +Use `typia` for runtime validation with detailed type checking: ```typescript import { OpenApi, OpenApiV3, OpenApiV3_1, SwaggerV2 } from "@samchon/openapi"; @@ -140,8 +147,9 @@ import typia from "typia"; const document: any = await fetch("swagger.json").then(r => r.json()); // Validate with detailed error messages -const result: typia.IValidation = - typia.validate(document); +const result = typia.validate< + SwaggerV2.IDocument | OpenApiV3.IDocument | OpenApiV3_1.IDocument +>(document); if (result.success) { const emended: OpenApi.IDocument = OpenApi.convert(result.data); @@ -150,77 +158,53 @@ if (result.success) { } ``` -Try it in the playground: [Type assertion](https://typia.io/playground/?script=JYWwDg9gTgLgBAbzgeTAUwHYEEzADQrra4BqAzAapjsOQPoCMBAygO4CGA5p2lCQExwAvnABmUCCDgAiAAIBndiADGACwgYA9BCLtc0gNwAoUJFhwYAT1zsxEqdKs3DRo8o3z4IdsAxwAvHDs8pYYynAAFACUAFxwAAr2wPJoADwAbhDAACYAfAH5CEZwcJqacADiAKIAKnAAmsgAqgBKKPFVAHJY8QCScAAiyADCTQCyXTXFcO4YnnBQaPKQc2hxLUsrKQFBHMDwomgwahHTJdKqMDBg8jFlUOysAHSc+6oArgBG7ylQszCYGBPdwgTSKFTqLQ6TB6YCabyeXiaNAADyUYAANktNOkyE8AAzaXTAJ4AK3kGmk0yixhKs3m2QgyneIEBcXYGEsO0ePngi2WHjQZIpGGixmmZTgNXqHTgWGYzCqLRqvWQnWmTmA7CewV+MAq73YUGyqTOcAAPoRqKQyIwnr0BkyWYCzZaqMRaHiHU7WRgYK64GwuDw+Px7Y7mb7-SVchFGZHATTXCVJcM1SQlXUasg4FUJp0BlUBtN6fA0L7smhsnF3TRwz7ATta7hgRp0rwYHGG36k3SPBAsU9fKIIBFy5hK9kk0JjN5fNFgexjqoIvSB0LeBIoDSgA) | [Detailed 
validation](https://typia.io/playground/?script=JYWwDg9gTgLgBAbzgeTAUwHYEEzADQrra4BqAzAapjsOQPoCMBAygO4CGA5p2lCQExwAvnABmUCCDgAiAAIBndiADGACwgYA9BCLtc0gNwAoUJFhwYAT1zsxEqdKs3DRo8o3z4IdsAxwAvHDs8pYYynAAFACUAFxwAAr2wPJoADwAbhDAACYAfAH5CEZwcJqacADiAKIAKnAAmsgAqgBKKPFVAHJY8QCScAAiyADCTQCyXTXFcO4YnnBQaPKQc2hxLUsrKQFBHMDwomgwahHTJdKqMDBg8jFlUOysAHSc+6oArgBG7ylQszCYGBPdwgTSKFTqLQ6TB6YCabyeXiaNAADyUYAANktNOkyE8AAzaXTAJ4AK3kGmk0yixhKs3m2QgyneIEBcXYGEsO0ePngi2WHjQZIpGGixmmZTgNXqHTgJCwABlegMsDVeshOtN6Xylu8MfBAk5gOwnul2BicuwAakznAAD6EaikMiMJ7KpkswG2h1UYi0PHu5msjAwb1wNhcHh8fhugYe4Ohkq5CKMoOAmnTYCiSL8vVA+TvZTKJbyAL+QKic0pKKIW30iBYp6+UQQCK5-VPXgSKDyDMlEqLGDvKAYWnCVwlSXDDUkKotOo1ZBwKoTToDKoDLUeeBoYPZNDZOK+mix+OAnbH3DAjTpXgwFNnkN9mYeBtC5ut3eYffZDNCYzeL40TAlaJz1o2XbQDSQA) +Try it: [Type assertion](https://typia.io/playground/?script=JYWwDg9gTgLgBAbzgeTAUwHYEEzADQrra4BqAzAapjsOQPoCMBAygO4CGA5p2lCQExwAvnABmUCCDgAiAAIBndiADGACwgYA9BCLtc0gNwAoUJFhwYAT1zsxEqdKs3DRo8o3z4IdsAxwAvHDs8pYYynAAFACUAFxwAAr2wPJoADwAbhDAACYAfAH5CEZwcJqacADiAKIAKnAAmsgAqgBKKPFVAHJY8QCScAAiyADCTQCyXTXFcO4YnnBQaPKQc2hxLUsrKQFBHMDwomgwahHTJdKqMDBg8jFlUOysAHSc+6oArgBG7ylQszCYGBPdwgTSKFTqLQ6TB6YCabyeXiaNAADyUYAANktNOkyE8AAzaXTAJ4AK3kGmk0yixhKs3m2QgyneIEBcXYGEsO0ePngi2WHjQZIpGGixmmZTgNXqHTgWGYzCqLRqvWQnWmTmA7CewV+MAq73YUGyqTOcAAPoRqKQyIwnr0BkyWYCzZaqMRaHiHU7WRgYK64GwuDw+Px7Y7mb7-SVchFGZHATTXCVJcM1SQlXUasg4FUJp0BlUBtN6fA0L7smhsnF3TRwz7ATta7hgRp0rwYHGG36k3SPBAsU9fKIIBFy5hK9kk0JjN5fNFgexjqoIvSB0LeBIoDSgA) | [Detailed 
validation](https://typia.io/playground/?script=JYWwDg9gTgLgBAbzgeTAUwHYEEzADQrra4BqAzAapjsOQPoCMBAygO4CGA5p2lCQExwAvnABmUCCDgAiAAIBndiADGACwgYA9BCLtc0gNwAoUJFhwYAT1zsxEqdKs3DRo8o3z4IdsAxwAvHDs8pYYynAAFACUAFxwAAr2wPJoADwAbhDAACYAfAH5CEZwcJqacADiAKIAKnAAmsgAqgBKKPFVAHJY8QCScAAiyADCTQCyXTXFcO4YnnBQaPKQc2hxLUsrKQFBHMDwomgwahHTJdKqMDBg8jFlUOysAHSc+6oArgBG7ylQszCYGBPdwgTSKFTqLQ6TB6YCabyeXiaNAADyUYAANktNOkyE8AAzaXTAJ4AK3kGmk0yixhKs3m2QgyneIEBcXYGEsO0ePngi2WHjQZIpGGixmmZTgNXqHTgJCwABlegMsDVeshOtN6Xylu8MfBAk5gOwnul2BicuwAakznAAD6EaikMiMJ7KpkswG2h1UYi0PHu5msjAwb1wNhcHh8fhugYe4Ohkq5CKMoOAmnTYCiSL8vVA+TvZTKJbyAL+QKic0pKKIW30iBYp6+UQQCK5-VPXgSKDyDMlEqLGDvKAYWnCVwlSXDDUkKotOo1ZBwKoTToDKoDLUeeBoYPZNDZOK+mix+OAnbH3DAjTpXgwFNnkN9mYeBtC5ut3eYffZDNCYzeL40TAlaJz1o2XbQDSQA) ## LLM Function Calling -```mermaid -flowchart - subgraph "OpenAPI Specification" - v20("Swagger v2.0") --upgrades--> emended[["OpenAPI v3.1 (emended)"]] - v30("OpenAPI v3.0") --upgrades--> emended - v31("OpenAPI v3.1") --emends--> emended - end - subgraph "OpenAPI Generator" - emended --normalizes--> migration[["Migration Schema"]] - migration --"Artificial Intelligence"--> lfc{{"LLM Function Calling"}} - lfc --"OpenAI"--> chatgpt("ChatGPT") - lfc --"Google"--> gemini("Gemini") - lfc --"Anthropic"--> claude("Claude") - lfc --"Google" --> legacy_gemini(" (legacy) Gemini") - legacy_gemini --"3.0" --> custom(["Custom JSON Schema"]) - chatgpt --"3.1"--> custom - gemini --"3.1"--> standard(["Standard JSON Schema"]) - claude --"3.1"--> standard - end -``` - -Turn your HTTP backend into an AI-callable service. `@samchon/openapi` converts your OpenAPI document into function schemas that OpenAI, Claude, and Gemini can understand and call. 
- -### Supported AI Models - -**[`IChatGptSchema`](https://samchon.github.io/openapi/api/types/IChatGptSchema-1.html)** - For OpenAI GPT -- Fully compatible with OpenAI's strict mode - - strict mode is not recommended - - [validation feedback strategy](#validation-feedback---fixing-llm-mistakes) is much powerful -- Uses JSDoc tags in `description` to bypass OpenAI's schema limitations - -**[`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)** - For Anthropic Claude ⭐ **Recommended** -- Follows JSON Schema standard most closely -- No artificial restrictions - cleanest type definitions -- Ideal default choice when you're unsure which model to use - - working on every models unless OpenAI's strict mode or legacy Gemini - -**[`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)** - For Google Gemini -- Supports nearly all JSON Schema specifications (as of Nov 2025) -- Previous versions had severe restrictions, but these are now removed - -> [!NOTE] -> -> You can also compose [`ILlmApplication`](https://samchon.github.io/openapi/api/interfaces/ILlmApplication-1.html) from a TypeScript class using `typia`. -> -> https://typia.io/docs/llm/application -> -> ```typescript -> import { ILlmApplication } from "@samchon/openapi"; -> import typia from "typia"; -> -> const app: ILlmApplication<"chatgpt"> = -> typia.llm.application(); -> ``` +Turn your HTTP backend into an AI-callable service. `@samchon/openapi` converts your OpenAPI document into function calling schemas that work with OpenAI GPT, Claude, Qwen, Llama, and other LLM providers. + +**Type Definitions:** + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
CategoryBaseHTTPMCP
ApplicationILlmApplicationIHttpLlmApplicationIMcpLlmApplication
FunctionILlmFunctionIHttpLlmFunctionIMcpLlmFunction
ParametersILlmSchema.IParameters
SchemaILlmSchema
### Complete Example -Here's a full example showing how OpenAI GPT selects a function, fills arguments, and you execute it: - -**Resources:** -- [Full Example Code](https://github.com/samchon/openapi/blob/master/test/src/examples/chatgpt-function-call-to-sale-create.ts) -- [User Prompt Example](https://github.com/samchon/openapi/blob/master/test/examples/function-calling/prompts/microsoft-surface-pro-9.md) -- [LLM-Generated Arguments](https://github.com/samchon/openapi/blob/master/test/examples/function-calling/arguments/chatgpt.microsoft-surface-pro-9.input.json) -- [Function Calling Schema](https://github.com/samchon/openapi/blob/master/test/examples/function-calling/schemas/chatgpt.sale.schema.json) +Here's a full example showing LLM function calling with OpenAI (works identically with Claude, Qwen, etc.): ```typescript import { HttpLlm, OpenApi, IHttpLlmApplication, IHttpLlmFunction } from "@samchon/openapi"; @@ -228,14 +212,12 @@ import OpenAI from "openai"; // 1. Convert OpenAPI to LLM function calling application const document: OpenApi.IDocument = OpenApi.convert(swagger); -const application: IHttpLlmApplication<"chatgpt"> = - HttpLlm.application({ - model: "chatgpt", - document, - }); +const application: IHttpLlmApplication = HttpLlm.application({ + document, +}); // 2. Find the function by path and method -const func: IHttpLlmFunction<"chatgpt"> | undefined = application.functions.find( +const func: IHttpLlmFunction | undefined = application.functions.find( (f) => f.path === "/shoppings/sellers/sale" && f.method === "post" ); if (!func) throw new Error("Function not found"); @@ -259,9 +241,8 @@ const completion: OpenAI.ChatCompletion = await client.chat.completions.create({ }); // 4. 
Execute the function call on your actual server -const toolCall: OpenAI.ChatCompletionMessageToolCall = - completion.choices[0].message.tool_calls![0]; -const result: unknown = await HttpLlm.execute({ +const toolCall = completion.choices[0].message.tool_calls![0]; +const result = await HttpLlm.execute({ connection: { host: "http://localhost:37001" }, application, function: func, @@ -269,6 +250,24 @@ const result: unknown = await HttpLlm.execute({ }); ``` +**Works with Any LLM Provider:** + +```typescript +// OpenAI +const openai = new OpenAI({ apiKey: "..." }); + +// Anthropic Claude +const anthropic = new Anthropic({ apiKey: "..." }); + +// Alibaba Qwen via DashScope +const qwen = new OpenAI({ + apiKey: "...", + baseURL: "https://dashscope.aliyuncs.com/compatible-mode/v1", +}); + +// All use the same func.parameters schema +``` + ### Validation Feedback - Fixing LLM Mistakes **The Problem**: LLMs make type errors. A lot. @@ -281,15 +280,9 @@ Even when your schema says `Array`, GPT might return just `"string"`. In **The Solution**: Validate LLM output and send errors back for correction. 
```typescript -import { HttpLlm, OpenApi, IHttpLlmApplication, IHttpLlmFunction, IValidation } from "@samchon/openapi"; +import { HttpLlm, IHttpLlmFunction, IValidation } from "@samchon/openapi"; -// Setup application -const document: OpenApi.IDocument = OpenApi.convert(swagger); -const application: IHttpLlmApplication<"chatgpt"> = HttpLlm.application({ - model: "chatgpt", - document, -}); -const func: IHttpLlmFunction<"chatgpt"> = application.functions[0]; +const func: IHttpLlmFunction = application.functions[0]; // Validate LLM-generated arguments const result: IValidation = func.validate(llmArguments); @@ -302,7 +295,7 @@ if (result.success === false) { }); } else { // Execute the validated function - const output: unknown = await HttpLlm.execute({ + const output = await HttpLlm.execute({ connection: { host: "http://localhost:3000" }, application, function: func, @@ -316,19 +309,19 @@ The validation uses [`typia.validate()`](https://typia.io/docs/validators/val Components | `typia` | `TypeBox` | `ajv` | `io-ts` | `zod` | `C.V.` -------------------------|--------|-----------|-------|---------|-------|------------------ -**Easy to use** | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ +**Easy to use** | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ [Object (simple)](https://github.com/samchon/typia/blob/master/test/src/structures/ObjectSimple.ts) | ✔ | ✔ | ✔ | ✔ | ✔ | ✔ [Object (hierarchical)](https://github.com/samchon/typia/blob/master/test/src/structures/ObjectHierarchical.ts) | ✔ | ✔ | ✔ | ✔ | ✔ | ✔ -[Object (recursive)](https://github.com/samchon/typia/blob/master/test/src/structures/ObjectRecursive.ts) | ✔ | ❌ | ✔ | ✔ | ✔ | ✔ | ✔ +[Object (recursive)](https://github.com/samchon/typia/blob/master/test/src/structures/ObjectRecursive.ts) | ✔ | ❌ | ✔ | ✔ | ✔ | ✔ [Object (union, implicit)](https://github.com/samchon/typia/blob/master/test/src/structures/ObjectUnionImplicit.ts) | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ [Object (union, 
explicit)](https://github.com/samchon/typia/blob/master/test/src/structures/ObjectUnionExplicit.ts) | ✔ | ✔ | ✔ | ✔ | ✔ | ❌ -[Object (additional tags)](https://github.com/samchon/typia/#comment-tags) | ✔ | ✔ | ✔ | ✔ | ✔ | ✔ -[Object (template literal types)](https://github.com/samchon/typia/blob/master/test/src/structures/TemplateUnion.ts) | ✔ | ✔ | ✔ | ❌ | ❌ | ❌ +[Object (additional tags)](https://github.com/samchon/#comment-tags) | ✔ | ✔ | ✔ | ✔ | ✔ | ✔ +[Object (template literal)](https://github.com/samchon/typia/blob/master/test/src/structures/TemplateUnion.ts) | ✔ | ✔ | ✔ | ❌ | ❌ | ❌ [Object (dynamic properties)](https://github.com/samchon/typia/blob/master/test/src/structures/DynamicTemplate.ts) | ✔ | ✔ | ✔ | ❌ | ❌ | ❌ [Array (rest tuple)](https://github.com/samchon/typia/blob/master/test/src/structures/TupleRestAtomic.ts) | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ [Array (hierarchical)](https://github.com/samchon/typia/blob/master/test/src/structures/ArrayHierarchical.ts) | ✔ | ✔ | ✔ | ✔ | ✔ | ✔ [Array (recursive)](https://github.com/samchon/typia/blob/master/test/src/structures/ArrayRecursive.ts) | ✔ | ✔ | ✔ | ✔ | ✔ | ❌ -[Array (recursive, union)](https://github.com/samchon/typia/blob/master/test/src/structures/ArrayRecursiveUnionExplicit.ts) | ✔ | ✔ | ❌ | ✔ | ✔ | ❌ +[Array (R+U, explicit)](https://github.com/samchon/typia/blob/master/test/src/structures/ArrayRecursiveUnionExplicit.ts) | ✔ | ✔ | ❌ | ✔ | ✔ | ❌ [Array (R+U, implicit)](https://github.com/samchon/typia/blob/master/test/src/structures/ArrayRecursiveUnionImplicit.ts) | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ [Array (repeated)](https://github.com/samchon/typia/blob/master/test/src/structures/ArrayRepeatedNullable.ts) | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ [Array (repeated, union)](https://github.com/samchon/typia/blob/master/test/src/structures/ArrayRepeatedUnionWithTuple.ts) | ✅ | ❌ | ❌ | ❌ | ❌ | ❌ @@ -336,34 +329,68 @@ Components | `typia` | `TypeBox` | `ajv` | `io-ts` | `zod` | `C.V. 
> `C.V.` means `class-validator` +### Human-AI Collaboration (Separating Parameters) + +Sometimes you need both human input and AI-generated parameters. Use the `separate` option to split parameters between LLM and human: + +```typescript +import { HttpLlm, LlmTypeChecker } from "@samchon/openapi"; + +const application = HttpLlm.application({ + document, + options: { + separate: (schema) => + LlmTypeChecker.isString(schema) && + !!schema.contentMediaType?.startsWith("image"), + }, +}); + +const func = application.functions.find( + (f) => f.path === "/shoppings/sellers/sale" && f.method === "post" +)!; + +// func.separated.llm - for AI to fill (text, numbers, etc.) +// func.separated.human - for human to provide (file uploads, images) + +const result = await HttpLlm.execute({ + connection: { host: "http://localhost:37001" }, + application, + function: func, + input: HttpLlm.mergeParameters({ + function: func, + llm: llmGeneratedArgs, + human: { + content: { + files: [...], // Human provides files + thumbnails: [...], // Human provides images + }, + }, + }), +}); +``` + ## Model Context Protocol + ```mermaid -flowchart - subgraph "JSON Schema Specification" - schemav4("JSON Schema v4 ~ v7") --upgrades--> emended[["OpenAPI v3.1 (emended)"]] - schema2910("JSON Schema 2019-03") --upgrades--> emended - schema2020("JSON Schema 2020-12") --emends--> emended - end - subgraph "OpenAPI Generator" - emended --normalizes--> migration[["Migration Schema"]] - migration --"Artificial Intelligence"--> lfc{{"LLM Function Calling"}} - lfc --"OpenAI"--> chatgpt("ChatGPT") - lfc --"Google"--> gemini("Gemini") - lfc --"Anthropic"--> claude("Claude") - lfc --"Google" --> legacy_gemini(" (legacy) Gemini") - legacy_gemini --"3.0" --> custom(["Custom JSON Schema"]) - chatgpt --"3.1"--> custom - gemini --"3.1"--> standard(["Standard JSON Schema"]) - claude --"3.1"--> standard - end +flowchart TB +subgraph "JSON Schema Specification" + schemav4("JSON Schema v4 ~ v7") --upgrades--> 
emended[["OpenAPI v3.1 (emended)"]] + schema2910("JSON Schema 2019-03") --upgrades--> emended + schema2020("JSON Schema 2020-12") --emends--> emended +end +subgraph "AI Ecosystem" + emended --normalizes--> migration[["Migration Schema"]] + migration --AI-Ready--> schema{{"LLM Function Schema"}} + schema --supports--> all("All LLM Providers") +end ``` `@samchon/openapi` provides better MCP function calling than using the [`mcp_servers`](https://openai.github.io/openai-agents-python/mcp/#using-mcp-servers) property directly. -While MCP (Model Context Protocol) can execute server functions directly through the `mcp_servers` property, `@samchon/openapi` offers significant advantages through [model specification support](https://wrtnlabs.io/agentica/docs/core/vendor/), [validation feedback](#validation-feedback), and [selector agent filtering](https://wrtnlabs.io/agentica/docs/concepts/function-calling/#orchestration-strategy) for context optimization. +While MCP can execute server functions directly through the `mcp_servers` property, `@samchon/openapi` offers significant advantages through [validation feedback](#validation-feedback---fixing-llm-mistakes) and [selector agent filtering](https://wrtnlabs.io/agentica/docs/concepts/function-calling/#orchestration-strategy) for context optimization. For example, the GitHub MCP server has 30 functions. Loading all of them via `mcp_servers` creates huge context that often causes AI agents to crash with hallucinations. Function calling with proper filtering avoids this problem. @@ -377,48 +404,64 @@ For example, the GitHub MCP server has 30 functions. Loading all of them via `mc **Creating MCP applications:** -Use [`McpLlm.application()`](https://samchon.github.io/openapi/api/functions/McpLlm.application.html) to create function calling schemas from MCP tools. 
The returned [`IMcpLlmApplication`](https://samchon.github.io/openapi/api/interfaces/IMcpLlmApplication-1.html) includes the [`IMcpLlmFunction.validate()`](https://samchon.github.io/openapi/api/interfaces/IMcpLlmFunction.html#validate) function for [validation feedback](#validation-feedback). +Use [`McpLlm.application()`](https://samchon.github.io/openapi/api/functions/McpLlm.application.html) to create function calling schemas from MCP tools. The returned [`IMcpLlmApplication`](https://samchon.github.io/openapi/api/interfaces/IMcpLlmApplication-1.html) works across all LLM providers and includes validation feedback. -MCP supports all JSON schema specifications without restrictions: - - JSON Schema v4, v5, v6, v7 - - JSON Schema 2019-03 - - JSON Schema 2020-12 +MCP supports all JSON schema specifications: +- JSON Schema v4, v5, v6, v7 +- JSON Schema 2019-03 +- JSON Schema 2020-12 ```typescript -import { - IMcpLlmApplication, - IMcpLlmFunction, - IValidation, - McpLlm, -} from "@samchon/openapi"; - -const application: IMcpLlmApplication<"chatgpt"> = McpLlm.application({ - model: "chatgpt", - tools: [...], +import { IMcpLlmApplication, IMcpLlmFunction, IValidation, McpLlm } from "@samchon/openapi"; + +const application: IMcpLlmApplication = McpLlm.application({ + tools: [...], // MCP tools }); -const func: IMcpLlmFunction<"chatgpt"> = application.functions.find( +const func: IMcpLlmFunction = application.functions.find( (f) => f.name === "create", )!; +// Validate with detailed feedback const result: IValidation = func.validate({ title: "Hello World", body: "Nice to meet you AI developers", thumbnail: "https://wrtnlabs.io/agentica/thumbnail.jpg", }); -console.log(result); + +if (result.success) { + // Execute validated function + console.log("Valid arguments:", result.data); +} else { + // Send errors back to LLM for correction + console.error("Validation errors:", result.errors); +} ``` +> [!NOTE] +> +> You can also compose 
[`ILlmApplication`](https://samchon.github.io/openapi/api/interfaces/ILlmApplication-1.html) from a TypeScript class using `typia`. +> +> https://typia.io/docs/llm/application +> +> ```typescript +> import { ILlmApplication } from "@samchon/openapi"; +> import typia from "typia"; +> +> const app: ILlmApplication = typia.llm.application(); +> ``` + ## Utilization Cases + ### Agentica [![Agentica](https://wrtnlabs.io/agentica/og.jpg)](https://github.com/wrtnlabs/agentica) https://github.com/wrtnlabs/agentica -Agentic AI framework that converts OpenAPI documents into LLM function calling schemas for ChatGPT, Claude, and Gemini. Uses `@samchon/openapi` to transform backend REST APIs into callable functions with automatic parameter validation and type-safe remote execution. +Agentic AI framework that converts OpenAPI documents into LLM function calling schemas. Uses `@samchon/openapi` to transform backend REST APIs into callable functions with automatic parameter validation and type-safe remote execution. ```typescript import { Agentica, assertHttpController } from "@agentica/core"; @@ -428,25 +471,20 @@ import typia from "typia"; import { MobileFileSystem } from "./services/MobileFileSystem"; const agent = new Agentica({ - model: "chatgpt", vendor: { api: new OpenAI({ apiKey: "********" }), - model: "gpt-4.1-mini", + model: "gpt-4o-mini", }, controllers: [ - // functions from TypeScript class - typia.llm.controller( + // Functions from TypeScript class + typia.llm.controller( "filesystem", MobileFileSystem(), ), - // functions from Swagger/OpenAPI - // Uses @samchon/openapi under the hood: - // 1. OpenApi.convert() to emended format - // 2. HttpLlm.application() to create IHttpLlmApplication<"chatgpt"> - // 3. 
IChatGptSchema composed for each API operation + // Functions from Swagger/OpenAPI + // Uses @samchon/openapi under the hood assertHttpController({ name: "shopping", - model: "chatgpt", document: await fetch( "https://shopping-be.wrtn.ai/editor/swagger.json", ).then(r => r.json()), @@ -472,29 +510,36 @@ import { MicroAgentica } from "@agentica/core"; import { OpenApi } from "@samchon/openapi"; const agent = new MicroAgentica({ - model: "chatgpt", vendor: { api: new OpenAI({ apiKey: "********" }), - model: "gpt-4.1-mini", + model: "gpt-4o-mini", }, controllers: [ // Compiler functions that receive/produce OpenApi.IDocument - typia.llm.controller( + typia.llm.controller( "api", new OpenApiWriteApplication(), ), ], }); -await agent.conversate("Design API specification, and generate backend app."); +await agent.conversate("Design API specification and generate backend app."); class OpenApiWriteApplication { // LLM calls this function with OpenApi.IDocument structure - // The type guarantees all operations have valid IJsonSchema definitions - public async write(document: OpenApi.IDocument): Promise { - // document.paths contains OpenApi.IOperation[] - // Each operation.parameters, requestBody, responses use OpenApi.IJsonSchema + public async write(document: OpenApi.IDocument): Promise { // Compiler validates schema structure before code generation ... } } -``` \ No newline at end of file +``` + + + + +## License + +MIT License + +Copyright (c) 2024 Jeongho Nam + +For detailed API documentation, visit: https://samchon.github.io/openapi/api/ diff --git a/package.json b/package.json index ee8ac923..9899ed3b 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "@samchon/openapi", - "version": "5.1.0", - "description": "OpenAPI definitions and converters for 'typia' and 'nestia'.", + "version": "6.0.0", + "description": "Universal OpenAPI to LLM function calling schemas. 
Transform any Swagger/OpenAPI document into type-safe schemas for OpenAI, Claude, Qwen, and more.", "main": "./lib/index.js", "module": "./lib/index.mjs", "typings": "./lib/index.d.ts", @@ -26,7 +26,7 @@ "openai", "chatgpt", "claude", - "gemini", + "qwen", "llama" ], "repository": { diff --git a/src/HttpLlm.ts b/src/HttpLlm.ts index 7514858e..0633cda8 100644 --- a/src/HttpLlm.ts +++ b/src/HttpLlm.ts @@ -4,7 +4,6 @@ import { OpenApiV3 } from "./OpenApiV3"; import { OpenApiV3_1 } from "./OpenApiV3_1"; import { SwaggerV2 } from "./SwaggerV2"; import { HttpLlmComposer } from "./composers/HttpLlmApplicationComposer"; -import { LlmSchemaComposer } from "./composers/LlmSchemaComposer"; import { HttpLlmFunctionFetcher } from "./http/HttpLlmFunctionFetcher"; import { IHttpConnection } from "./structures/IHttpConnection"; import { IHttpLlmApplication } from "./structures/IHttpLlmApplication"; @@ -12,7 +11,6 @@ import { IHttpLlmFunction } from "./structures/IHttpLlmFunction"; import { IHttpMigrateApplication } from "./structures/IHttpMigrateApplication"; import { IHttpResponse } from "./structures/IHttpResponse"; import { ILlmFunction } from "./structures/ILlmFunction"; -import { ILlmSchema } from "./structures/ILlmSchema"; import { LlmDataMerger } from "./utils/LlmDataMerger"; /** @@ -30,11 +28,10 @@ import { LlmDataMerger } from "./utils/LlmDataMerger"; * {@link HttpLlm.propagate HttpLlm.propagate()}. * * By the way, if you have configured the - * {@link IHttpLlmApplication.IOptions.separate} option to separate the - * parameters into human and LLM sides, you can merge these human and LLM sides' - * parameters into one through - * {@link HttpLlm.mergeParameters HttpLlm.mergeParameters()} before the actual - * LLM function call execution. 
+ * {@link IHttpLlmApplication.IConfig.separate} option to separate the parameters + * into human and LLM sides, you can merge these human and LLM sides' parameters + * into one through {@link HttpLlm.mergeParameters HttpLlm.mergeParameters()} + * before the actual LLM function call execution. * * @author Jeongho Nam - https://github.com/samchon */ @@ -42,15 +39,8 @@ export namespace HttpLlm { /* ----------------------------------------------------------- COMPOSERS ----------------------------------------------------------- */ - /** - * Properties for the LLM function calling application composer. - * - * @template Model Target LLM model - */ - export interface IApplicationProps { - /** Target LLM model. */ - model: Model; - + /** Properties for the LLM function calling application composer. */ + export interface IApplicationProps { /** OpenAPI document to convert. */ document: | OpenApi.IDocument @@ -58,8 +48,8 @@ export namespace HttpLlm { | OpenApiV3.IDocument | OpenApiV3_1.IDocument; - /** Options for the LLM function calling schema conversion. */ - options?: Partial>; + /** Configuration for the LLM function calling schema conversion. */ + config?: Partial; } /** @@ -72,44 +62,31 @@ export namespace HttpLlm { * converted to the {@link IHttpLlmFunction LLM function} type, and they would * be used for the LLM function calling. * - * If you have configured the {@link IHttpLlmApplication.IOptions.separate} + * If you have configured the {@link IHttpLlmApplication.IConfig.separate} * option, every parameters in the {@link IHttpLlmFunction} would be separated * into both human and LLM sides. In that case, you can merge these human and * LLM sides' parameters into one through {@link HttpLlm.mergeParameters} * before the actual LLM function call execution. 
* - * Additionally, if you have configured the - * {@link IHttpLlmApplication.IOptions.keyword} as `true`, the number of - * {@link IHttpLlmFunction.parameters} are always 1 and the first parameter - * type is always {@link ILlmSchemaV3.IObject}. I recommend this option because - * LLM can understand the keyword arguments more easily. - * * @param props Properties for composition * @returns LLM function calling application */ - export const application = ( - props: IApplicationProps, - ): IHttpLlmApplication => { + export const application = ( + props: IApplicationProps, + ): IHttpLlmApplication => { // MIGRATE const migrate: IHttpMigrateApplication = HttpMigration.application( props.document, ); - const defaultConfig: ILlmSchema.IConfig = - LlmSchemaComposer.defaultConfig(props.model); - return HttpLlmComposer.application({ + return HttpLlmComposer.application({ migrate, - model: props.model, - options: { - ...Object.fromEntries( - Object.entries(defaultConfig).map( - ([key, value]) => - [key, (props.options as any)?.[key] ?? value] as const, - ), - ), - separate: props.options?.separate ?? null, - maxLength: props.options?.maxLength ?? 64, - equals: props.options?.equals ?? false, - } as any as IHttpLlmApplication.IOptions, + config: { + reference: props.config?.reference ?? true, + strict: props.config?.strict ?? false, + separate: props.config?.separate ?? null, + maxLength: props.config?.maxLength ?? 64, + equals: props.config?.equals ?? false, + }, }); }; @@ -117,12 +94,12 @@ export namespace HttpLlm { FETCHERS ----------------------------------------------------------- */ /** Properties for the LLM function call. */ - export interface IFetchProps { + export interface IFetchProps { /** Application of the LLM function calling. */ - application: IHttpLlmApplication; + application: IHttpLlmApplication; /** LLM function schema to call. */ - function: IHttpLlmFunction; + function: IHttpLlmFunction; /** Connection info to the HTTP server. 
*/ connection: IHttpConnection; @@ -140,16 +117,12 @@ export namespace HttpLlm { * sometimes). * * By the way, if you've configured the - * {@link IHttpLlmApplication.IOptions.separate}, so that the parameters are - * separated to human and LLM sides, you have to merge these humand and LLM + * {@link IHttpLlmApplication.IConfig.separate}, so that the parameters are + * separated to human and LLM sides, you have to merge these human and LLM * sides' parameters into one through {@link HttpLlm.mergeParameters} * function. * - * About the {@link IHttpLlmApplication.IOptions.keyword} option, don't worry - * anything. This `HttmLlm.execute()` function will automatically recognize - * the keyword arguments and convert them to the proper sequence. - * - * For reference, if the target API endpoinnt responds none 200/201 status, + * For reference, if the target API endpoint responds none 200/201 status, * this would be considered as an error and the {@link HttpError} would be * thrown. Otherwise you don't want such rule, you can use the * {@link HttpLlm.propagate} function instead. @@ -158,9 +131,8 @@ export namespace HttpLlm { * @returns Return value (response body) from the API endpoint * @throws HttpError when the API endpoint responds none 200/201 status */ - export const execute = ( - props: IFetchProps, - ): Promise => HttpLlmFunctionFetcher.execute(props); + export const execute = (props: IFetchProps): Promise => + HttpLlmFunctionFetcher.execute(props); /** * Propagate the LLM function call. @@ -171,15 +143,11 @@ export namespace HttpLlm { * sometimes). * * By the way, if you've configured the - * {@link IHttpLlmApplication.IOptions.separate}, so that the parameters are + * {@link IHttpLlmApplication.IConfig.separate}, so that the parameters are * separated to human and LLM sides, you have to merge these humand and LLM * sides' parameters into one through {@link HttpLlm.mergeParameters} * function. 
* - * About the {@link IHttpLlmApplication.IOptions.keyword} option, don't worry - * anything. This `HttmLlm.propagate()` function will automatically recognize - * the keyword arguments and convert them to the proper sequence. - * * For reference, the propagation means always returning the response from the * API endpoint, even if the status is not 200/201. This is useful when you * want to handle the response by yourself. @@ -188,17 +156,16 @@ export namespace HttpLlm { * @returns Response from the API endpoint * @throws Error only when the connection is failed */ - export const propagate = ( - props: IFetchProps, - ): Promise => HttpLlmFunctionFetcher.propagate(props); + export const propagate = (props: IFetchProps): Promise => + HttpLlmFunctionFetcher.propagate(props); /* ----------------------------------------------------------- MERGERS ----------------------------------------------------------- */ /** Properties for the parameters' merging. */ - export interface IMergeProps { + export interface IMergeProps { /** Metadata of the target function. */ - function: ILlmFunction; + function: ILlmFunction; /** Arguments composed by the LLM. */ llm: object | null; @@ -210,22 +177,21 @@ export namespace HttpLlm { /** * Merge the parameters. * - * If you've configured the {@link IHttpLlmApplication.IOptions.separate} + * If you've configured the {@link IHttpLlmApplication.IConfig.separate} * option, so that the parameters are separated to human and LLM sides, you * can merge these humand and LLM sides' parameters into one through this * `HttpLlm.mergeParameters()` function before the actual LLM function call - * wexecution. + * execution. * * On contrary, if you've not configured the - * {@link IHttpLlmApplication.IOptions.separate} option, this function would + * {@link IHttpLlmApplication.IConfig.separate} option, this function would * throw an error. 
* * @param props Properties for the parameters' merging * @returns Merged parameter values */ - export const mergeParameters = ( - props: IMergeProps, - ): object => LlmDataMerger.parameters(props); + export const mergeParameters = (props: IMergeProps): object => + LlmDataMerger.parameters(props); /** * Merge two values. diff --git a/src/HttpMigration.ts b/src/HttpMigration.ts index c0a7d8ff..c9733f23 100644 --- a/src/HttpMigration.ts +++ b/src/HttpMigration.ts @@ -2,7 +2,7 @@ import { OpenApi } from "./OpenApi"; import { OpenApiV3 } from "./OpenApiV3"; import { OpenApiV3_1 } from "./OpenApiV3_1"; import { SwaggerV2 } from "./SwaggerV2"; -import { HttpMigrateApplicationComposer } from "./composers/migrate/HttpMigrateApplicationComposer"; +import { HttpMigrateApplicationComposer } from "./composers/HttpMigrateApplicationComposer"; import { HttpMigrateRouteFetcher } from "./http/HttpMigrateRouteFetcher"; import { IHttpConnection } from "./structures/IHttpConnection"; import { IHttpMigrateApplication } from "./structures/IHttpMigrateApplication"; diff --git a/src/McpLlm.ts b/src/McpLlm.ts index c0e958cc..48799534 100644 --- a/src/McpLlm.ts +++ b/src/McpLlm.ts @@ -29,15 +29,8 @@ import { OpenApiValidator } from "./utils/OpenApiValidator"; * @author Jeongho Nam - https://github.com/samchon */ export namespace McpLlm { - /** - * Properties for the LLM function calling application composer. - * - * @template Model Target LLM model - */ - export interface IApplicationProps { - /** Target LLM model. */ - model: Model; - + /** Properties for the LLM function calling application composer. */ + export interface IApplicationProps { /** * List of tools. * @@ -49,8 +42,8 @@ export namespace McpLlm { */ tools: Array; - /** Options for the LLM function calling schema conversion. */ - options?: Partial>; + /** Configuration for the LLM function calling schema conversion. 
*/ + config?: Partial; } /** @@ -72,19 +65,14 @@ export namespace McpLlm { * @param props Properties for composition * @returns LLM function calling application */ - export const application = ( - props: IApplicationProps, - ): IMcpLlmApplication => { - const options: IMcpLlmApplication.IOptions = { - ...Object.fromEntries( - Object.entries(LlmSchemaComposer.defaultConfig(props.model)).map( - ([key, value]) => - [key, (props.options as any)?.[key] ?? value] as const, - ), - ), - maxLength: props.options?.maxLength ?? 64, - } as IMcpLlmApplication.IOptions; - const functions: IMcpLlmFunction[] = []; + export const application = (props: IApplicationProps): IMcpLlmApplication => { + const config: IMcpLlmApplication.IConfig = { + reference: props.config?.reference ?? true, + strict: props.config?.strict ?? false, + maxLength: props.config?.maxLength ?? 64, + equals: props.config?.equals ?? false, + }; + const functions: IMcpLlmFunction[] = []; const errors: IMcpLlmApplication.IError[] = []; props.tools.forEach((tool, i) => { @@ -114,17 +102,15 @@ export namespace McpLlm { } // CONVERT TO LLM PARAMETERS - const parameters: IResult< - ILlmSchema.IParameters, - IOpenApiSchemaError - > = LlmSchemaComposer.parameters(props.model)({ - config: options as any, - components, - schema: schema as - | OpenApi.IJsonSchema.IObject - | OpenApi.IJsonSchema.IReference, - accessor: `$input.tools[${i}].inputSchema`, - }) as IResult, IOpenApiSchemaError>; + const parameters: IResult = + LlmSchemaComposer.parameters({ + config, + components, + schema: schema as + | OpenApi.IJsonSchema.IObject + | OpenApi.IJsonSchema.IReference, + accessor: `$input.tools[${i}].inputSchema`, + }); if (parameters.success) functions.push({ name: tool.name, @@ -134,7 +120,7 @@ export namespace McpLlm { components, schema, required: true, - equals: options.equals, + equals: config.equals, }), }); else @@ -149,9 +135,8 @@ export namespace McpLlm { }); }); return { - model: props.model, functions, - options, + 
config, errors, }; }; diff --git a/src/composers/HttpLlmApplicationComposer.ts b/src/composers/HttpLlmApplicationComposer.ts index 1f680e20..55bb9e6b 100644 --- a/src/composers/HttpLlmApplicationComposer.ts +++ b/src/composers/HttpLlmApplicationComposer.ts @@ -3,7 +3,6 @@ import { IHttpLlmApplication } from "../structures/IHttpLlmApplication"; import { IHttpLlmFunction } from "../structures/IHttpLlmFunction"; import { IHttpMigrateApplication } from "../structures/IHttpMigrateApplication"; import { IHttpMigrateRoute } from "../structures/IHttpMigrateRoute"; -import { ILlmFunction } from "../structures/ILlmFunction"; import { ILlmSchema } from "../structures/ILlmSchema"; import { IOpenApiSchemaError } from "../structures/IOpenApiSchemaError"; import { IResult } from "../structures/IResult"; @@ -11,12 +10,18 @@ import { OpenApiValidator } from "../utils/OpenApiValidator"; import { LlmSchemaComposer } from "./LlmSchemaComposer"; export namespace HttpLlmComposer { - export const application = (props: { - model: Model; + export const application = (props: { migrate: IHttpMigrateApplication; - options: IHttpLlmApplication.IOptions; - }): IHttpLlmApplication => { + config?: Partial; + }): IHttpLlmApplication => { // COMPOSE FUNCTIONS + const config: IHttpLlmApplication.IConfig = { + separate: props.config?.separate ?? null, + maxLength: props.config?.maxLength ?? 64, + equals: props.config?.equals ?? false, + reference: props.config?.reference ?? true, + strict: props.config?.strict ?? 
false, + }; const errors: IHttpLlmApplication.IError[] = props.migrate.errors .filter((e) => e.operation()["x-samchon-human"] !== true) .map((e) => ({ @@ -26,7 +31,7 @@ export namespace HttpLlmComposer { operation: () => e.operation(), route: () => undefined, })); - const functions: IHttpLlmFunction[] = props.migrate.routes + const functions: IHttpLlmFunction[] = props.migrate.routes .filter((e) => e.operation()["x-samchon-human"] !== true) .map((route, i) => { if (route.method === "head") { @@ -54,11 +59,10 @@ export namespace HttpLlmComposer { return null; } const localErrors: string[] = []; - const func: IHttpLlmFunction | null = composeFunction({ - model: props.model, - config: props.options, + const func: IHttpLlmFunction | null = composeFunction({ components: props.migrate.document().components, - route: route, + config, + route, errors: localErrors, index: i, }); @@ -72,26 +76,24 @@ export namespace HttpLlmComposer { }); return func; }) - .filter((v): v is IHttpLlmFunction => v !== null); + .filter((v): v is IHttpLlmFunction => v !== null); - const app: IHttpLlmApplication = { - model: props.model, - options: props.options, + const app: IHttpLlmApplication = { + config, functions, errors, }; - shorten(app, props.options?.maxLength ?? 64); + shorten(app, props.config?.maxLength ?? 64); return app; }; - const composeFunction = (props: { - model: Model; + const composeFunction = (props: { components: OpenApi.IComponents; route: IHttpMigrateRoute; - config: IHttpLlmApplication.IOptions; + config: IHttpLlmApplication.IConfig; errors: string[]; index: number; - }): IHttpLlmFunction | null => { + }): IHttpLlmFunction | null => { // METADATA const endpoint: string = `$input.paths[${JSON.stringify(props.route.path)}][${JSON.stringify(props.route.method)}]`; const operation: OpenApi.IOperation = props.route.operation(); @@ -173,29 +175,25 @@ export namespace HttpLlmComposer { }; parameters.required = Object.keys(parameters.properties ?? 
{}); - const llmParameters: IResult< - ILlmSchema.IParameters, - IOpenApiSchemaError - > = LlmSchemaComposer.parameters(props.model)({ - config: props.config as any, - components: props.components, - schema: parameters, - accessor: `${endpoint}.parameters`, - }) as IResult, IOpenApiSchemaError>; + const llmParameters: IResult = + LlmSchemaComposer.parameters({ + config: props.config, + components: props.components, + schema: parameters, + accessor: `${endpoint}.parameters`, + }); // RETURN VALUE - const output: IResult, IOpenApiSchemaError> | undefined = - props.route.success - ? (LlmSchemaComposer.schema(props.model)({ - config: props.config as any, - components: props.components, - schema: props.route.success.schema, - accessor: `${endpoint}.responses[${JSON.stringify(props.route.success.status)}][${JSON.stringify(props.route.success.type)}].schema`, - $defs: llmParameters.success - ? (llmParameters.value as any).$defs! - : {}, - }) as IResult, IOpenApiSchemaError>) - : undefined; + const output: IResult | undefined = props + .route.success + ? LlmSchemaComposer.schema({ + config: props.config, + components: props.components, + schema: props.route.success.schema, + accessor: `${endpoint}.responses[${JSON.stringify(props.route.success.status)}][${JSON.stringify(props.route.success.type)}].schema`, + $defs: llmParameters.success ? llmParameters.value.$defs : {}, + }) + : undefined; //---- // CONVERSION @@ -229,12 +227,11 @@ export namespace HttpLlmComposer { name, parameters: llmParameters.value, separated: props.config.separate - ? (LlmSchemaComposer.separateParameters(props.model)({ - predicate: props.config.separate as any, - parameters: - llmParameters.value satisfies ILlmSchema.ModelParameters[Model] as any, + ? LlmSchemaComposer.separate({ + predicate: props.config.separate, + parameters: llmParameters.value, equals: props.config.equals ?? 
false, - }) as ILlmFunction.ISeparated) + }) : undefined, output: output?.value, description: description[0], @@ -251,12 +248,12 @@ export namespace HttpLlmComposer { }; }; - export const shorten = ( - app: IHttpLlmApplication, + export const shorten = ( + app: IHttpLlmApplication, limit: number = 64, ): void => { const dictionary: Set = new Set(); - const longFunctions: IHttpLlmFunction[] = []; + const longFunctions: IHttpLlmFunction[] = []; for (const func of app.functions) { dictionary.add(func.name); if (func.name.length > limit) { diff --git a/src/composers/migrate/HttpMigrateApplicationComposer.ts b/src/composers/HttpMigrateApplicationComposer.ts similarity index 87% rename from src/composers/migrate/HttpMigrateApplicationComposer.ts rename to src/composers/HttpMigrateApplicationComposer.ts index 7ef93774..29f32335 100644 --- a/src/composers/migrate/HttpMigrateApplicationComposer.ts +++ b/src/composers/HttpMigrateApplicationComposer.ts @@ -1,7 +1,7 @@ -import { OpenApi } from "../../OpenApi"; -import { IHttpMigrateApplication } from "../../structures/IHttpMigrateApplication"; -import { IHttpMigrateRoute } from "../../structures/IHttpMigrateRoute"; -import { EndpointUtil } from "../../utils/EndpointUtil"; +import { OpenApi } from "../OpenApi"; +import { IHttpMigrateApplication } from "../structures/IHttpMigrateApplication"; +import { IHttpMigrateRoute } from "../structures/IHttpMigrateRoute"; +import { EndpointUtil } from "../utils/EndpointUtil"; import { HttpMigrateRouteAccessor } from "./HttpMigrateRouteAccessor"; import { HttpMigrateRouteComposer } from "./HttpMigrateRouteComposer"; diff --git a/src/composers/migrate/HttpMigrateRouteAccessor.ts b/src/composers/HttpMigrateRouteAccessor.ts similarity index 95% rename from src/composers/migrate/HttpMigrateRouteAccessor.ts rename to src/composers/HttpMigrateRouteAccessor.ts index 403d9be2..8918fccd 100644 --- a/src/composers/migrate/HttpMigrateRouteAccessor.ts +++ b/src/composers/HttpMigrateRouteAccessor.ts @@ 
-1,7 +1,7 @@ -import { IHttpMigrateRoute } from "../../structures/IHttpMigrateRoute"; -import { EndpointUtil } from "../../utils/EndpointUtil"; -import { Escaper } from "../../utils/Escaper"; -import { MapUtil } from "../../utils/MapUtil"; +import { IHttpMigrateRoute } from "../structures/IHttpMigrateRoute"; +import { EndpointUtil } from "../utils/EndpointUtil"; +import { Escaper } from "../utils/Escaper"; +import { MapUtil } from "../utils/MapUtil"; export namespace HttpMigrateRouteAccessor { export const overwrite = (routes: IHttpMigrateRoute[]): void => { diff --git a/src/composers/migrate/HttpMigrateRouteComposer.ts b/src/composers/HttpMigrateRouteComposer.ts similarity index 98% rename from src/composers/migrate/HttpMigrateRouteComposer.ts rename to src/composers/HttpMigrateRouteComposer.ts index cd9ee85b..0c9f3592 100644 --- a/src/composers/migrate/HttpMigrateRouteComposer.ts +++ b/src/composers/HttpMigrateRouteComposer.ts @@ -1,8 +1,8 @@ -import { OpenApi } from "../../OpenApi"; -import { IHttpMigrateRoute } from "../../structures/IHttpMigrateRoute"; -import { EndpointUtil } from "../../utils/EndpointUtil"; -import { Escaper } from "../../utils/Escaper"; -import { OpenApiTypeChecker } from "../../utils/OpenApiTypeChecker"; +import { OpenApi } from "../OpenApi"; +import { IHttpMigrateRoute } from "../structures/IHttpMigrateRoute"; +import { EndpointUtil } from "../utils/EndpointUtil"; +import { Escaper } from "../utils/Escaper"; +import { OpenApiTypeChecker } from "../utils/OpenApiTypeChecker"; export namespace HttpMigrateRouteComposer { export interface IProps { diff --git a/src/composers/llm/LlmDescriptionInverter.ts b/src/composers/LlmDescriptionInverter.ts similarity index 97% rename from src/composers/llm/LlmDescriptionInverter.ts rename to src/composers/LlmDescriptionInverter.ts index ad3cc926..5cbe9762 100644 --- a/src/composers/llm/LlmDescriptionInverter.ts +++ b/src/composers/LlmDescriptionInverter.ts @@ -1,5 +1,5 @@ -import { OpenApi } from 
"../../OpenApi"; -import { OpenApiExclusiveEmender } from "../../utils/OpenApiExclusiveEmender"; +import { OpenApi } from "../OpenApi"; +import { OpenApiExclusiveEmender } from "../utils/OpenApiExclusiveEmender"; export namespace LlmDescriptionInverter { export const numeric = ( diff --git a/src/composers/llm/LlmParametersComposer.ts b/src/composers/LlmParametersComposer.ts similarity index 85% rename from src/composers/llm/LlmParametersComposer.ts rename to src/composers/LlmParametersComposer.ts index 4671d8ea..2ec38440 100644 --- a/src/composers/llm/LlmParametersComposer.ts +++ b/src/composers/LlmParametersComposer.ts @@ -1,7 +1,7 @@ -import { OpenApi } from "../../OpenApi"; -import { IOpenApiSchemaError } from "../../structures/IOpenApiSchemaError"; -import { IResult } from "../../structures/IResult"; -import { OpenApiTypeChecker } from "../../utils/OpenApiTypeChecker"; +import { OpenApi } from "../OpenApi"; +import { IOpenApiSchemaError } from "../structures/IOpenApiSchemaError"; +import { IResult } from "../structures/IResult"; +import { OpenApiTypeChecker } from "../utils/OpenApiTypeChecker"; /** @internal */ export namespace LlmParametersFinder { diff --git a/src/composers/LlmSchemaComposer.ts b/src/composers/LlmSchemaComposer.ts index 7688fd68..4bfff9e5 100644 --- a/src/composers/LlmSchemaComposer.ts +++ b/src/composers/LlmSchemaComposer.ts @@ -1,97 +1,838 @@ +import { OpenApi } from "../OpenApi"; +import { IJsonSchemaAttribute } from "../structures/IJsonSchemaAttribute"; +import { ILlmFunction } from "../structures/ILlmFunction"; import { ILlmSchema } from "../structures/ILlmSchema"; -import { ChatGptTypeChecker } from "../utils/ChatGptTypeChecker"; -import { ClaudeTypeChecker } from "../utils/ClaudeTypeChecker"; -import { DeepSeekTypeChecker } from "../utils/DeepSeekTypeChecker"; -import { GeminiTypeChecker } from "../utils/GeminiTypeChecker"; -import { LlamaTypeChecker } from "../utils/LlamaTypeChecker"; -import { LlmTypeCheckerV3 } from 
"../utils/LlmTypeCheckerV3"; -import { LlmTypeCheckerV3_1 } from "../utils/LlmTypeCheckerV3_1"; -import { ChatGptSchemaComposer } from "./llm/ChatGptSchemaComposer"; -import { ClaudeSchemaComposer } from "./llm/ClaudeSchemaComposer"; -import { GeminiSchemaComposer } from "./llm/GeminiSchemaComposer"; -import { LlmSchemaV3Composer } from "./llm/LlmSchemaV3Composer"; -import { LlmSchemaV3_1Composer } from "./llm/LlmSchemaV3_1Composer"; +import { IOpenApiSchemaError } from "../structures/IOpenApiSchemaError"; +import { IResult } from "../structures/IResult"; +import { LlmTypeChecker } from "../utils/LlmTypeChecker"; +import { NamingConvention } from "../utils/NamingConvention"; +import { OpenApiConstraintShifter } from "../utils/OpenApiConstraintShifter"; +import { OpenApiTypeChecker } from "../utils/OpenApiTypeChecker"; +import { OpenApiValidator } from "../utils/OpenApiValidator"; +import { JsonDescriptionUtil } from "../utils/internal/JsonDescriptionUtil"; +import { LlmDescriptionInverter } from "./LlmDescriptionInverter"; +import { LlmParametersFinder } from "./LlmParametersComposer"; export namespace LlmSchemaComposer { - export const parameters = (model: Model) => - PARAMETERS_CASTERS[model]; + /* ----------------------------------------------------------- + CONVERTERS + ----------------------------------------------------------- */ + export const parameters = (props: { + config?: Partial; + components: OpenApi.IComponents; + schema: OpenApi.IJsonSchema.IObject | OpenApi.IJsonSchema.IReference; + accessor?: string; + refAccessor?: string; + }): IResult => { + const config: ILlmSchema.IConfig = getConfig(props.config); + const entity: IResult = + LlmParametersFinder.parameters({ + ...props, + method: "LlmSchemaComposer.parameters", + }); + if (entity.success === false) return entity; - export const schema = (model: Model) => - SCHEMA_CASTERS[model]; + const $defs: Record = {}; + const result: IResult = transform({ + ...props, + config, + $defs, + schema: 
entity.value, + }); + if (result.success === false) return result; + return { + success: true, + value: { + ...(result.value as ILlmSchema.IObject), + additionalProperties: false, + $defs, + description: OpenApiTypeChecker.isReference(props.schema) + ? JsonDescriptionUtil.cascade({ + prefix: "#/components/schemas/", + components: props.components, + schema: { + ...props.schema, + description: result.value.description, + }, + escape: true, + }) + : result.value.description, + } satisfies ILlmSchema.IParameters, + }; + }; - export const defaultConfig = (model: Model) => - DEFAULT_CONFIGS[model]; + export const schema = (props: { + config?: Partial; + components: OpenApi.IComponents; + $defs: Record; + schema: OpenApi.IJsonSchema; + accessor?: string; + refAccessor?: string; + }): IResult => + transform({ + config: getConfig(props.config), + components: props.components, + $defs: props.$defs, + schema: props.schema, + accessor: props.accessor, + refAccessor: props.refAccessor, + }); - export const typeChecker = (model: Model) => - TYPE_CHECKERS[model]; + const transform = (props: { + config: ILlmSchema.IConfig; + components: OpenApi.IComponents; + $defs: Record; + schema: OpenApi.IJsonSchema; + accessor?: string; + refAccessor?: string; + }): IResult => { + // PREPARE ASSETS + const union: Array = []; + const attribute: IJsonSchemaAttribute = { + title: props.schema.title, + description: props.schema.description, + deprecated: props.schema.deprecated, + readOnly: props.schema.readOnly, + writeOnly: props.schema.writeOnly, + example: props.schema.example, + examples: props.schema.examples, + ...Object.fromEntries( + Object.entries(props.schema).filter( + ([key, value]) => key.startsWith("x-") && value !== undefined, + ), + ), + }; - export const separateParameters = ( - model: Model, - ) => SEPARATE_PARAMETERS[model]; + // VALIDATE SCHEMA + const reasons: IOpenApiSchemaError.IReason[] = []; + OpenApiTypeChecker.visit({ + closure: (next, accessor) => { + if
(props.config.strict === true) { + // STRICT MODE VALIDATION + reasons.push(...validateStrict(next, accessor)); + } + if (OpenApiTypeChecker.isTuple(next)) + reasons.push({ + accessor, + schema: next, + message: `LLM does not allow tuple type.`, + }); + else if (OpenApiTypeChecker.isReference(next)) { + // UNABLE TO FIND MATCHED REFERENCE + const key: string = next.$ref.split("#/components/schemas/")[1]; + if (props.components.schemas?.[key] === undefined) + reasons.push({ + schema: next, + accessor: accessor, + message: `unable to find reference type ${JSON.stringify(key)}.`, + }); + } + }, + components: props.components, + schema: props.schema, + accessor: props.accessor, + refAccessor: props.refAccessor, + }); + if (reasons.length > 0) + return { + success: false, + error: { + method: "LlmSchemaComposer.schema", + message: "Failed to compose LLM schema", + reasons, + }, + }; - export const invert = (model: Model) => - INVERTS[model]; + const visitConstant = (input: OpenApi.IJsonSchema): void => { + const insert = (value: any): void => { + const matched: + | ILlmSchema.IString + | ILlmSchema.INumber + | ILlmSchema.IBoolean + | undefined = union.find( + (u) => + (u as (IJsonSchemaAttribute & { type: string }) | undefined) + ?.type === typeof value, + ) as ILlmSchema.IString | undefined; + if (matched !== undefined) { + matched.enum ??= []; + matched.enum.push(value); + } else + union.push({ + type: typeof value as "number", + enum: [value], + }); + }; + if (OpenApiTypeChecker.isConstant(input)) insert(input.const); + else if (OpenApiTypeChecker.isOneOf(input)) + input.oneOf.forEach(visitConstant); + }; + const visit = (input: OpenApi.IJsonSchema, accessor: string): void => { + if (OpenApiTypeChecker.isOneOf(input)) { + // UNION TYPE + input.oneOf.forEach((s, i) => visit(s, `${accessor}.oneOf[${i}]`)); + } else if (OpenApiTypeChecker.isReference(input)) { + // REFERENCE TYPE + const key: string = input.$ref.split("#/components/schemas/")[1]; + const target: 
OpenApi.IJsonSchema | undefined = + props.components.schemas?.[key]; + if (target === undefined) return; + else if ( + // KEEP THE REFERENCE TYPE + props.config.reference === true || + OpenApiTypeChecker.isRecursiveReference({ + components: props.components, + schema: input, + }) + ) { + const out = () => { + union.push({ + ...input, + $ref: `#/$defs/${key}`, + }); + }; + if (props.$defs[key] !== undefined) return out(); - /** @internal */ - export const isDefs = ( - model: Model, - ): boolean => IS_DEFS[model](); -} + props.$defs[key] = {}; + const converted: IResult = transform( + { + config: props.config, + components: props.components, + $defs: props.$defs, + schema: target, + refAccessor: props.refAccessor, + accessor: `${props.refAccessor ?? "$def"}[${JSON.stringify(key)}]`, + }, + ); + if (converted.success === false) return; // UNREACHABLE + props.$defs[key] = converted.value; + return out(); + } else { + // DISCARD THE REFERENCE TYPE + const length: number = union.length; + visit(target, accessor); + visitConstant(target); + if (length === union.length - 1) + union[union.length - 1] = { + ...union[union.length - 1]!, + description: JsonDescriptionUtil.cascade({ + prefix: "#/components/schemas/", + components: props.components, + schema: input, + escape: true, + }), + }; + else + attribute.description = JsonDescriptionUtil.cascade({ + prefix: "#/components/schemas/", + components: props.components, + schema: input, + escape: true, + }); + } + } else if (OpenApiTypeChecker.isObject(input)) { + // OBJECT TYPE + const properties: Record = Object.fromEntries( + Object.entries(input.properties ?? {}) + .map(([key, value]) => { + const converted: IResult = + transform({ + config: props.config, + components: props.components, + $defs: props.$defs, + schema: value, + refAccessor: props.refAccessor, + accessor: `${props.accessor ?? 
"$input.schema"}.properties[${JSON.stringify(key)}]`, + }); + if (converted.success === false) { + reasons.push(...converted.error.reasons); + return [key, null]; + } + return [key, converted.value]; + }) + .filter(([, value]) => value !== null), + ); + if (Object.values(properties).some((v) => v === null)) return; -const PARAMETERS_CASTERS = { - chatgpt: ChatGptSchemaComposer.parameters, - claude: ClaudeSchemaComposer.parameters, - gemini: GeminiSchemaComposer.parameters, - "3.0": LlmSchemaV3Composer.parameters, - "3.1": LlmSchemaV3_1Composer.parameters, -}; + const additionalProperties: ILlmSchema | boolean | undefined | null = + (() => { + if ( + typeof input.additionalProperties === "object" && + input.additionalProperties !== null + ) { + const converted: IResult = + transform({ + config: props.config, + components: props.components, + $defs: props.$defs, + schema: input.additionalProperties, + refAccessor: props.refAccessor, + accessor: `${accessor}.additionalProperties`, + }); + if (converted.success === false) { + reasons.push(...converted.error.reasons); + return null; + } + return converted.value; + } + return props.config.strict === true + ? false + : input.additionalProperties; + })(); + if (additionalProperties === null) return; + union.push({ + ...input, + properties, + additionalProperties, + required: input.required ?? [], + description: + props.config.strict === true + ? JsonDescriptionUtil.take(input) + : input.description, + }); + } else if (OpenApiTypeChecker.isArray(input)) { + // ARRAY TYPE + const items: IResult = transform({ + config: props.config, + components: props.components, + $defs: props.$defs, + schema: input.items, + refAccessor: props.refAccessor, + accessor: `${accessor}.items`, + }); + if (items.success === false) { + reasons.push(...items.error.reasons); + return; + } + union.push( + props.config.strict === true + ? 
OpenApiConstraintShifter.shiftArray({ + ...input, + items: items.value, + }) + : { + ...input, + items: items.value, + }, + ); + } else if (OpenApiTypeChecker.isString(input)) + union.push( + props.config.strict === true + ? OpenApiConstraintShifter.shiftString({ ...input }) + : input, + ); + else if ( + OpenApiTypeChecker.isNumber(input) || + OpenApiTypeChecker.isInteger(input) + ) + union.push( + props.config.strict === true + ? OpenApiConstraintShifter.shiftNumeric({ ...input }) + : input, + ); + else if (OpenApiTypeChecker.isTuple(input)) + return; // UNREACHABLE + else if (OpenApiTypeChecker.isConstant(input) === false) + union.push({ ...input }); + }; -const SCHEMA_CASTERS = { - chatgpt: ChatGptSchemaComposer.schema, - claude: ClaudeSchemaComposer.schema, - gemini: GeminiSchemaComposer.schema, - "3.0": LlmSchemaV3Composer.schema, - "3.1": LlmSchemaV3_1Composer.schema, -}; + visitConstant(props.schema); + visit(props.schema, props.accessor ?? "$input.schema"); -const SEPARATE_PARAMETERS = { - chatgpt: ChatGptSchemaComposer.separateParameters, - claude: ClaudeSchemaComposer.separateParameters, - gemini: GeminiSchemaComposer.separateParameters, - "3.0": LlmSchemaV3Composer.separateParameters, - "3.1": LlmSchemaV3_1Composer.separateParameters, -}; + if (reasons.length > 0) + return { + success: false, + error: { + method: "LlmSchemaComposer.schema", + message: "Failed to compose LLM schema", + reasons, + }, + }; + else if (union.length === 0) + return { + // unknown type + success: true, + value: { + ...attribute, + type: undefined, + }, + }; + else if (union.length === 1) + return { + // single type + success: true, + value: { + ...attribute, + ...union[0], + description: + props.config.strict === true && LlmTypeChecker.isReference(union[0]) + ? undefined + : (union[0].description ?? 
attribute.description), + }, + }; + return { + success: true, + value: { + ...attribute, + anyOf: union.map((u) => ({ + ...u, + description: + props.config.strict === true && LlmTypeChecker.isReference(u) + ? undefined + : u.description, + })), + "x-discriminator": + OpenApiTypeChecker.isOneOf(props.schema) && + props.schema.discriminator !== undefined && + props.schema.oneOf.length === union.length && + union.every( + (e) => LlmTypeChecker.isReference(e) || LlmTypeChecker.isNull(e), + ) + ? { + propertyName: props.schema.discriminator.propertyName, + mapping: + props.schema.discriminator.mapping !== undefined + ? Object.fromEntries( + Object.entries(props.schema.discriminator.mapping).map( + ([key, value]) => [ + key, + `#/$defs/${value.split("/").at(-1)}`, + ], + ), + ) + : undefined, + } + : undefined, + }, + }; + }; -const INVERTS = { - chatgpt: ChatGptSchemaComposer.invert, - claude: ClaudeSchemaComposer.invert, - gemini: GeminiSchemaComposer.invert, - "3.0": LlmSchemaV3Composer.invert, - "3.1": LlmSchemaV3_1Composer.invert, -}; + /* ----------------------------------------------------------- + SEPARATORS + ----------------------------------------------------------- */ + export const separate = (props: { + parameters: ILlmSchema.IParameters; + predicate: (schema: ILlmSchema) => boolean; + convention?: (key: string, type: "llm" | "human") => string; + equals?: boolean; + }): ILlmFunction.ISeparated => { + const convention = + props.convention ?? + ((key, type) => `${key}.${NamingConvention.capitalize(type)}`); + const [llm, human] = separateObject({ + predicate: props.predicate, + convention, + $defs: props.parameters.$defs, + schema: props.parameters, + }); + if (llm === null || human === null) + return { + llm: (llm as ILlmSchema.IParameters | null) ?? 
{ + type: "object", + properties: {} as Record, + required: [], + additionalProperties: false, + $defs: {}, + }, + human: human as ILlmSchema.IParameters | null, + }; + const output: ILlmFunction.ISeparated = { + llm: { + ...llm, + $defs: Object.fromEntries( + Object.entries(props.parameters.$defs).filter(([key]) => + key.endsWith(".Llm"), + ), + ), + additionalProperties: false, + }, + human: { + ...human, + $defs: Object.fromEntries( + Object.entries(props.parameters.$defs).filter(([key]) => + key.endsWith(".Human"), + ), + ), + additionalProperties: false, + }, + }; + for (const key of Object.keys(props.parameters.$defs)) + if (key.endsWith(".Llm") === false && key.endsWith(".Human") === false) + delete props.parameters.$defs[key]; + if (Object.keys(output.llm.properties).length !== 0) { + const components: OpenApi.IComponents = {}; + output.validate = OpenApiValidator.create({ + components, + schema: invert({ + components, + schema: output.llm, + $defs: output.llm.$defs, + }), + required: true, + equals: props.equals, + }); + } + return output; + }; -const DEFAULT_CONFIGS = { - chatgpt: ChatGptSchemaComposer.DEFAULT_CONFIG, - claude: ClaudeSchemaComposer.DEFAULT_CONFIG, - gemini: GeminiSchemaComposer.DEFAULT_CONFIG, - "3.0": LlmSchemaV3Composer.DEFAULT_CONFIG, - "3.1": LlmSchemaV3_1Composer.DEFAULT_CONFIG, -}; + const separateStation = (props: { + predicate: (schema: ILlmSchema) => boolean; + convention: (key: string, type: "llm" | "human") => string; + $defs: Record; + schema: ILlmSchema; + }): [ILlmSchema | null, ILlmSchema | null] => { + if (props.predicate(props.schema) === true) return [null, props.schema]; + else if ( + LlmTypeChecker.isUnknown(props.schema) || + LlmTypeChecker.isAnyOf(props.schema) + ) + return [props.schema, null]; + else if (LlmTypeChecker.isObject(props.schema)) + return separateObject({ + predicate: props.predicate, + convention: props.convention, + $defs: props.$defs, + schema: props.schema, + }); + else if 
(LlmTypeChecker.isArray(props.schema)) + return separateArray({ + predicate: props.predicate, + convention: props.convention, + $defs: props.$defs, + schema: props.schema, + }); + else if (LlmTypeChecker.isReference(props.schema)) + return separateReference({ + predicate: props.predicate, + convention: props.convention, + $defs: props.$defs, + schema: props.schema, + }); + return [props.schema, null]; + }; -const TYPE_CHECKERS = { - chatgpt: ChatGptTypeChecker, - claude: ClaudeTypeChecker, - deepseek: DeepSeekTypeChecker, - gemini: GeminiTypeChecker, - llama: LlamaTypeChecker, - "3.0": LlmTypeCheckerV3, - "3.1": LlmTypeCheckerV3_1, -}; + const separateArray = (props: { + predicate: (schema: ILlmSchema) => boolean; + convention: (key: string, type: "llm" | "human") => string; + $defs: Record; + schema: ILlmSchema.IArray; + }): [ILlmSchema.IArray | null, ILlmSchema.IArray | null] => { + const [x, y] = separateStation({ + predicate: props.predicate, + convention: props.convention, + $defs: props.$defs, + schema: props.schema.items, + }); + return [ + x !== null + ? { + ...props.schema, + items: x, + } + : null, + y !== null + ? { + ...props.schema, + items: y, + } + : null, + ]; + }; + + const separateObject = (props: { + $defs: Record; + predicate: (schema: ILlmSchema) => boolean; + convention: (key: string, type: "llm" | "human") => string; + schema: ILlmSchema.IObject; + }): [ILlmSchema.IObject | null, ILlmSchema.IObject | null] => { + // EMPTY OBJECT + if ( + Object.keys(props.schema.properties ?? {}).length === 0 && + !!props.schema.additionalProperties === false + ) + return [props.schema, null]; + + const llm = { + ...props.schema, + properties: {} as Record, + additionalProperties: props.schema.additionalProperties, + } satisfies ILlmSchema.IObject; + const human = { + ...props.schema, + properties: {} as Record, + } satisfies ILlmSchema.IObject; + + for (const [key, value] of Object.entries(props.schema.properties ?? 
{})) { + const [x, y] = separateStation({ + predicate: props.predicate, + convention: props.convention, + $defs: props.$defs, + schema: value, + }); + if (x !== null) llm.properties[key] = x; + if (y !== null) human.properties[key] = y; + } + if ( + typeof props.schema.additionalProperties === "object" && + props.schema.additionalProperties !== null + ) { + const [dx, dy] = separateStation({ + predicate: props.predicate, + convention: props.convention, + $defs: props.$defs, + schema: props.schema.additionalProperties, + }); + llm.additionalProperties = dx ?? false; + human.additionalProperties = dy ?? false; + } + return [ + !!Object.keys(llm.properties).length || !!llm.additionalProperties + ? shrinkRequired(llm) + : null, + !!Object.keys(human.properties).length || human.additionalProperties + ? shrinkRequired(human) + : null, + ]; + }; + + const separateReference = (props: { + predicate: (schema: ILlmSchema) => boolean; + convention: (key: string, type: "llm" | "human") => string; + $defs: Record; + schema: ILlmSchema.IReference; + }): [ILlmSchema.IReference | null, ILlmSchema.IReference | null] => { + const key: string = props.schema.$ref.split("#/$defs/")[1]; + const humanKey: string = props.convention(key, "human"); + const llmKey: string = props.convention(key, "llm"); + + // FIND EXISTING + if (props.$defs?.[humanKey] || props.$defs?.[llmKey]) + return [ + props.$defs?.[llmKey] + ? { + ...props.schema, + $ref: `#/$defs/${llmKey}`, + } + : null, + props.$defs?.[humanKey] + ? 
{ + ...props.schema, + $ref: `#/$defs/${humanKey}`, + } + : null, + ]; + + // PRE-ASSIGNMENT + props.$defs![llmKey] = {}; + props.$defs![humanKey] = {}; + + // DO COMPOSE + const schema: ILlmSchema = props.$defs?.[key]!; + const [llm, human] = separateStation({ + predicate: props.predicate, + convention: props.convention, + $defs: props.$defs, + schema, + }); + if (llm !== null) Object.assign(props.$defs[llmKey], llm); + if (human !== null) Object.assign(props.$defs[humanKey], human); + + // ONLY ONE + if (llm === null || human === null) { + delete props.$defs[llmKey]; + delete props.$defs[humanKey]; + return llm === null ? [null, props.schema] : [props.schema, null]; + } + + // BOTH OF THEM + return [ + llm !== null + ? { + ...props.schema, + $ref: `#/$defs/${llmKey}`, + } + : null, + human !== null + ? { + ...props.schema, + $ref: `#/$defs/${humanKey}`, + } + : null, + ]; + }; + + const shrinkRequired = (s: ILlmSchema.IObject): ILlmSchema.IObject => { + s.required = s.required.filter((key) => s.properties?.[key] !== undefined); + return s; + }; + + /* ----------------------------------------------------------- + INVERTERS + ----------------------------------------------------------- */ + export const invert = (props: { + components: OpenApi.IComponents; + schema: ILlmSchema; + $defs: Record; + }): OpenApi.IJsonSchema => { + const union: OpenApi.IJsonSchema[] = []; + const attribute: IJsonSchemaAttribute = { + title: props.schema.title, + description: props.schema.description, + deprecated: props.schema.deprecated, + readOnly: props.schema.readOnly, + writeOnly: props.schema.writeOnly, + example: props.schema.example, + examples: props.schema.examples, + ...Object.fromEntries( + Object.entries(props.schema).filter( + ([key, value]) => key.startsWith("x-") && value !== undefined, + ), + ), + }; + + const next = (schema: ILlmSchema): OpenApi.IJsonSchema => + invert({ + components: props.components, + $defs: props.$defs, + schema, + }); + const visit = (schema: 
ILlmSchema): void => { + if (LlmTypeChecker.isArray(schema)) + union.push({ + ...schema, + ...LlmDescriptionInverter.array(schema.description), + items: next(schema.items), + }); + else if (LlmTypeChecker.isObject(schema)) + union.push({ + ...schema, + properties: Object.fromEntries( + Object.entries(schema.properties).map(([key, value]) => [ + key, + next(value), + ]), + ), + additionalProperties: + typeof schema.additionalProperties === "object" && + schema.additionalProperties !== null + ? next(schema.additionalProperties) + : schema.additionalProperties, + }); + else if (LlmTypeChecker.isAnyOf(schema)) schema.anyOf.forEach(visit); + else if (LlmTypeChecker.isReference(schema)) { + const key: string = schema.$ref.split("#/$defs/")[1]; + if (props.components.schemas?.[key] === undefined) { + props.components.schemas ??= {}; + props.components.schemas[key] = {}; + props.components.schemas[key] = next(props.$defs[key] ?? {}); + } + union.push({ + ...schema, + $ref: `#/components/schemas/${key}`, + }); + } else if (LlmTypeChecker.isBoolean(schema)) + if (!!schema.enum?.length) + schema.enum.forEach((v) => + union.push({ + const: v, + }), + ); + else union.push(schema); + else if ( + LlmTypeChecker.isInteger(schema) || + LlmTypeChecker.isNumber(schema) + ) + if (!!schema.enum?.length) + schema.enum.forEach((v) => + union.push({ + const: v, + }), + ); + else + union.push({ + ...schema, + ...LlmDescriptionInverter.numeric(schema.description), + ...{ enum: undefined }, + }); + else if (LlmTypeChecker.isString(schema)) + if (!!schema.enum?.length) + schema.enum.forEach((v) => + union.push({ + const: v, + }), + ); + else + union.push({ + ...schema, + ...LlmDescriptionInverter.string(schema.description), + ...{ enum: undefined }, + }); + else + union.push({ + ...schema, + }); + }; + visit(props.schema); + + return { + ...attribute, + ...(union.length === 0 + ? { type: undefined } + : union.length === 1 + ? 
{ ...union[0] } + : { + oneOf: union.map((u) => ({ ...u, nullable: undefined })), + discriminator: + LlmTypeChecker.isAnyOf(props.schema) && + props.schema["x-discriminator"] !== undefined + ? { + propertyName: + props.schema["x-discriminator"].propertyName, + mapping: + props.schema["x-discriminator"].mapping !== undefined + ? Object.fromEntries( + Object.entries( + props.schema["x-discriminator"].mapping, + ).map(([key, value]) => [ + key, + `#/components/schemas/${value.split("/").at(-1)}`, + ]), + ) + : undefined, + } + : undefined, + }), + } satisfies OpenApi.IJsonSchema; + }; + + export const getConfig = ( + config?: Partial | undefined, + ): ILlmSchema.IConfig => ({ + reference: config?.reference ?? true, + strict: config?.strict ?? false, + }); +} -const IS_DEFS = { - chatgpt: () => ChatGptSchemaComposer.IS_DEFS, - claude: () => ClaudeSchemaComposer.IS_DEFS, - gemini: () => GeminiSchemaComposer.IS_DEFS, - "3.0": () => LlmSchemaV3Composer.IS_DEFS, - "3.1": () => LlmSchemaV3_1Composer.IS_DEFS, +const validateStrict = ( + schema: OpenApi.IJsonSchema, + accessor: string, +): IOpenApiSchemaError.IReason[] => { + const reasons: IOpenApiSchemaError.IReason[] = []; + if (OpenApiTypeChecker.isObject(schema)) { + if (!!schema.additionalProperties) + reasons.push({ + schema: schema, + accessor: `${accessor}.additionalProperties`, + message: + "LLM does not allow additionalProperties in strict mode, the dynamic key typed object.", + }); + for (const key of Object.keys(schema.properties ?? 
{})) + if (schema.required?.includes(key) === false) + reasons.push({ + schema: schema, + accessor: `${accessor}.properties.${key}`, + message: "LLM does not allow optional properties in strict mode.", + }); + } + return reasons; }; diff --git a/src/composers/llm/ChatGptSchemaComposer.ts b/src/composers/llm/ChatGptSchemaComposer.ts deleted file mode 100644 index 58deb775..00000000 --- a/src/composers/llm/ChatGptSchemaComposer.ts +++ /dev/null @@ -1,178 +0,0 @@ -import { OpenApi } from "../../OpenApi"; -import { IChatGptSchema } from "../../structures/IChatGptSchema"; -import { IGeminiSchema } from "../../structures/IGeminiSchema"; -import { ILlmFunction } from "../../structures/ILlmFunction"; -import { ILlmSchemaV3_1 } from "../../structures/ILlmSchemaV3_1"; -import { IOpenApiSchemaError } from "../../structures/IOpenApiSchemaError"; -import { IResult } from "../../structures/IResult"; -import { GeminiTypeChecker } from "../../utils/GeminiTypeChecker"; -import { OpenApiConstraintShifter } from "../../utils/OpenApiConstraintShifter"; -import { OpenApiTypeChecker } from "../../utils/OpenApiTypeChecker"; -import { JsonDescriptionUtil } from "../../utils/internal/JsonDescriptionUtil"; -import { GeminiSchemaComposer } from "./GeminiSchemaComposer"; -import { LlmSchemaV3_1Composer } from "./LlmSchemaV3_1Composer"; - -export namespace ChatGptSchemaComposer { - /** @internal */ - export const IS_DEFS = true; - - export const DEFAULT_CONFIG: IChatGptSchema.IConfig = { - reference: true, - strict: false, - }; - - export const parameters = (props: { - config: IChatGptSchema.IConfig; - components: OpenApi.IComponents; - schema: OpenApi.IJsonSchema.IObject | OpenApi.IJsonSchema.IReference; - accessor?: string; - refAccessor?: string; - }): IResult => { - // polyfill - props.config.strict ??= false; - - // validate - const result: IResult = - LlmSchemaV3_1Composer.parameters({ - ...props, - config: { - reference: props.config.reference, - constraint: false, - }, - validate: 
props.config.strict === true ? validateStrict : undefined, - }); - if (result.success === false) return result; - - // returns with transformation - for (const key of Object.keys(result.value.$defs)) - result.value.$defs[key] = transform({ - config: props.config, - schema: result.value.$defs[key], - }); - return { - success: true, - value: transform({ - config: props.config, - schema: result.value, - }) as IChatGptSchema.IParameters, - }; - }; - - export const schema = (props: { - config: IChatGptSchema.IConfig; - components: OpenApi.IComponents; - $defs: Record; - schema: OpenApi.IJsonSchema; - accessor?: string; - refAccessor?: string; - }): IResult => { - // polyfill - props.config.strict ??= false; - - // validate - const oldbie: Set = new Set(Object.keys(props.$defs)); - const result: IResult = - LlmSchemaV3_1Composer.schema({ - ...props, - config: { - reference: props.config.reference, - constraint: false, - }, - validate: props.config.strict === true ? validateStrict : undefined, - }); - if (result.success === false) return result; - - // returns with transformation - for (const key of Object.keys(props.$defs)) - if (oldbie.has(key) === false) - props.$defs[key] = transform({ - config: props.config, - schema: props.$defs[key], - }); - return { - success: true, - value: transform({ - config: props.config, - schema: result.value, - }), - }; - }; - - const validateStrict = ( - schema: OpenApi.IJsonSchema, - accessor: string, - ): IOpenApiSchemaError.IReason[] => { - const reasons: IOpenApiSchemaError.IReason[] = []; - if (OpenApiTypeChecker.isObject(schema)) { - if (!!schema.additionalProperties) - reasons.push({ - schema: schema, - accessor: `${accessor}.additionalProperties`, - message: - "ChatGPT does not allow additionalProperties in strict mode, the dynamic key typed object.", - }); - for (const key of Object.keys(schema.properties ?? 
{})) - if (schema.required?.includes(key) === false) - reasons.push({ - schema: schema, - accessor: `${accessor}.properties.${key}`, - message: - "ChatGPT does not allow optional properties in strict mode.", - }); - } - return reasons; - }; - - const transform = (props: { - config: IChatGptSchema.IConfig; - schema: ILlmSchemaV3_1; - }): IChatGptSchema => { - const schema: IGeminiSchema = GeminiSchemaComposer.transform(props); - GeminiTypeChecker.visit({ - closure: (next) => { - if (GeminiTypeChecker.isString(next)) - OpenApiConstraintShifter.shiftString(next); - else if ( - GeminiTypeChecker.isInteger(next) || - GeminiTypeChecker.isNumber(next) - ) - OpenApiConstraintShifter.shiftNumeric(next); - else if (GeminiTypeChecker.isArray(next)) - OpenApiConstraintShifter.shiftArray(next); - else if ( - GeminiTypeChecker.isObject(next) && - props.config.strict === true - ) { - next.additionalProperties = false; - next.description = JsonDescriptionUtil.take(next); - } - }, - schema, - }); - if (props.config.strict === true) - GeminiTypeChecker.visit({ - closure: (next) => { - if (GeminiTypeChecker.isReference(next)) { - next.title = undefined; - next.description = undefined; - } - }, - schema, - }); - return schema satisfies IChatGptSchema; - }; - - export const separateParameters = (props: { - parameters: IChatGptSchema.IParameters; - predicate: (schema: IChatGptSchema) => boolean; - convention?: (key: string, type: "llm" | "human") => string; - equals?: boolean; - }): ILlmFunction.ISeparated<"chatgpt"> => - GeminiSchemaComposer.separateParameters(props); - - export const invert = (props: { - components: OpenApi.IComponents; - schema: IChatGptSchema; - $defs: Record; - }): OpenApi.IJsonSchema => GeminiSchemaComposer.invert(props); -} diff --git a/src/composers/llm/ClaudeSchemaComposer.ts b/src/composers/llm/ClaudeSchemaComposer.ts deleted file mode 100644 index bf70b619..00000000 --- a/src/composers/llm/ClaudeSchemaComposer.ts +++ /dev/null @@ -1,63 +0,0 @@ -import { 
OpenApi } from "../../OpenApi"; -import { IClaudeSchema } from "../../structures/IClaudeSchema"; -import { ILlmFunction } from "../../structures/ILlmFunction"; -import { IOpenApiSchemaError } from "../../structures/IOpenApiSchemaError"; -import { IResult } from "../../structures/IResult"; -import { LlmSchemaV3_1Composer } from "./LlmSchemaV3_1Composer"; - -export namespace ClaudeSchemaComposer { - /** @internal */ - export const IS_DEFS = true; - - export const DEFAULT_CONFIG: IClaudeSchema.IConfig = { - reference: true, - }; - - export const parameters = (props: { - config: IClaudeSchema.IConfig; - components: OpenApi.IComponents; - schema: OpenApi.IJsonSchema.IObject | OpenApi.IJsonSchema.IReference; - accessor?: string; - refAccessor?: string; - }): IResult => - LlmSchemaV3_1Composer.parameters({ - ...props, - config: { - reference: props.config.reference, - constraint: true, - }, - }); - - export const schema = (props: { - config: IClaudeSchema.IConfig; - components: OpenApi.IComponents; - $defs: Record; - schema: OpenApi.IJsonSchema; - accessor?: string; - refAccessor?: string; - }): IResult => - LlmSchemaV3_1Composer.schema({ - ...props, - config: { - reference: props.config.reference, - constraint: true, - }, - }); - - export const separateParameters = (props: { - parameters: IClaudeSchema.IParameters; - predicate: (schema: IClaudeSchema) => boolean; - convention?: (key: string, type: "llm" | "human") => string; - equals?: boolean; - }): ILlmFunction.ISeparated<"claude"> => { - const separated: ILlmFunction.ISeparated<"3.1"> = - LlmSchemaV3_1Composer.separateParameters(props); - return separated as any as ILlmFunction.ISeparated<"claude">; - }; - - export const invert = (props: { - components: OpenApi.IComponents; - schema: IClaudeSchema; - $defs: Record; - }): OpenApi.IJsonSchema => LlmSchemaV3_1Composer.invert(props); -} diff --git a/src/composers/llm/GeminiSchemaComposer.ts b/src/composers/llm/GeminiSchemaComposer.ts deleted file mode 100644 index 
7f4ac861..00000000 --- a/src/composers/llm/GeminiSchemaComposer.ts +++ /dev/null @@ -1,594 +0,0 @@ -import { OpenApi } from "../../OpenApi"; -import { IGeminiSchema } from "../../structures/IGeminiSchema"; -import { IJsonSchemaAttribute } from "../../structures/IJsonSchemaAttribute"; -import { ILlmFunction } from "../../structures/ILlmFunction"; -import { ILlmSchemaV3_1 } from "../../structures/ILlmSchemaV3_1"; -import { IOpenApiSchemaError } from "../../structures/IOpenApiSchemaError"; -import { IResult } from "../../structures/IResult"; -import { GeminiTypeChecker } from "../../utils/GeminiTypeChecker"; -import { LlmTypeCheckerV3_1 } from "../../utils/LlmTypeCheckerV3_1"; -import { NamingConvention } from "../../utils/NamingConvention"; -import { OpenApiTypeChecker } from "../../utils/OpenApiTypeChecker"; -import { OpenApiValidator } from "../../utils/OpenApiValidator"; -import { LlmDescriptionInverter } from "./LlmDescriptionInverter"; -import { LlmSchemaV3_1Composer } from "./LlmSchemaV3_1Composer"; - -export namespace GeminiSchemaComposer { - /** @internal */ - export const IS_DEFS = true; - export const DEFAULT_CONFIG: IGeminiSchema.IConfig = { - reference: true, - }; - - /* ----------------------------------------------------------- - CONVERTERS - ----------------------------------------------------------- */ - export const parameters = (props: { - config: IGeminiSchema.IConfig; - components: OpenApi.IComponents; - schema: OpenApi.IJsonSchema.IObject | OpenApi.IJsonSchema.IReference; - accessor?: string; - refAccessor?: string; - }): IResult => { - // validate - const result: IResult = - LlmSchemaV3_1Composer.parameters({ - ...props, - config: { - reference: props.config.reference, - constraint: true, - }, - }); - if (result.success === false) return result; - - // returns with transformation - for (const key of Object.keys(result.value.$defs)) - result.value.$defs[key] = transform({ - config: props.config, - schema: result.value.$defs[key], - }); - return { 
- success: true, - value: transform({ - config: props.config, - schema: result.value, - }) as IGeminiSchema.IParameters, - }; - }; - - export const schema = (props: { - config: IGeminiSchema.IConfig; - components: OpenApi.IComponents; - $defs: Record; - schema: OpenApi.IJsonSchema; - accessor?: string; - refAccessor?: string; - }): IResult => { - // validate - const oldbie: Set = new Set(Object.keys(props.$defs)); - const result: IResult = - LlmSchemaV3_1Composer.schema({ - ...props, - config: { - reference: props.config.reference, - constraint: true, - }, - }); - if (result.success === false) return result; - - // returns with transformation - for (const key of Object.keys(props.$defs)) - if (oldbie.has(key) === false) - props.$defs[key] = transform({ - config: props.config, - schema: props.$defs[key], - }); - return { - success: true, - value: transform({ - config: props.config, - schema: result.value, - }), - }; - }; - - /** @internal */ - export const transform = (props: { - config: IGeminiSchema.IConfig; - schema: ILlmSchemaV3_1; - }): IGeminiSchema => { - const union: Array = []; - const attribute: IJsonSchemaAttribute = { - title: props.schema.title, - description: props.schema.description, - deprecated: props.schema.deprecated, - readOnly: props.schema.readOnly, - writeOnly: props.schema.writeOnly, - example: props.schema.example, - examples: props.schema.examples, - ...Object.fromEntries( - Object.entries(schema).filter( - ([key, value]) => key.startsWith("x-") && value !== undefined, - ), - ), - }; - const visit = (input: ILlmSchemaV3_1): void => { - if (LlmTypeCheckerV3_1.isOneOf(input)) input.oneOf.forEach(visit); - else if (LlmTypeCheckerV3_1.isArray(input)) - union.push({ - ...input, - items: transform({ - config: props.config, - schema: input.items, - }), - }); - else if (LlmTypeCheckerV3_1.isObject(input)) - union.push({ - ...input, - properties: Object.fromEntries( - Object.entries(input.properties).map(([key, value]) => [ - key, - transform({ - 
config: props.config, - schema: value, - }), - ]), - ), - additionalProperties: - typeof input.additionalProperties === "object" && - input.additionalProperties !== null - ? transform({ - config: props.config, - schema: input.additionalProperties, - }) - : input.additionalProperties, - }); - else if (LlmTypeCheckerV3_1.isConstant(input) === false) - union.push(input); - }; - const visitConstant = (input: ILlmSchemaV3_1): void => { - const insert = (value: any): void => { - const matched: IGeminiSchema.IString | undefined = union.find( - (u) => - (u as (IJsonSchemaAttribute & { type: string }) | undefined) - ?.type === typeof value, - ) as IGeminiSchema.IString | undefined; - if (matched !== undefined) { - matched.enum ??= []; - matched.enum.push(value); - } else - union.push({ - type: typeof value as "number", - enum: [value], - }); - }; - if (OpenApiTypeChecker.isConstant(input)) insert(input.const); - else if (OpenApiTypeChecker.isOneOf(input)) - input.oneOf.forEach((s) => visitConstant(s as ILlmSchemaV3_1)); - }; - visit(props.schema); - visitConstant(props.schema); - if (union.length === 0) - return { - ...attribute, - type: undefined, - }; - else if (union.length === 1) - return { - ...attribute, - ...union[0], - description: union[0].description ?? attribute.description, - }; - return { - ...attribute, - anyOf: union, - "x-discriminator": - LlmTypeCheckerV3_1.isOneOf(props.schema) && - props.schema.discriminator !== undefined && - props.schema.oneOf.length === union.length && - union.every( - (e) => - GeminiTypeChecker.isReference(e) || GeminiTypeChecker.isNull(e), - ) - ? 
props.schema.discriminator - : undefined, - }; - }; - - /* ----------------------------------------------------------- - SEPARATORS - ----------------------------------------------------------- */ - export const separateParameters = (props: { - parameters: IGeminiSchema.IParameters; - predicate: (schema: IGeminiSchema) => boolean; - convention?: (key: string, type: "llm" | "human") => string; - equals?: boolean; - }): ILlmFunction.ISeparated<"chatgpt"> => { - const convention = - props.convention ?? - ((key, type) => `${key}.${NamingConvention.capitalize(type)}`); - const [llm, human] = separateObject({ - predicate: props.predicate, - convention, - $defs: props.parameters.$defs, - schema: props.parameters, - }); - if (llm === null || human === null) - return { - llm: (llm as IGeminiSchema.IParameters | null) ?? { - type: "object", - properties: {} as Record, - required: [], - additionalProperties: false, - $defs: {}, - }, - human: human as IGeminiSchema.IParameters | null, - }; - const output: ILlmFunction.ISeparated<"chatgpt"> = { - llm: { - ...llm, - $defs: Object.fromEntries( - Object.entries(props.parameters.$defs).filter(([key]) => - key.endsWith(".Llm"), - ), - ), - additionalProperties: false, - }, - human: { - ...human, - $defs: Object.fromEntries( - Object.entries(props.parameters.$defs).filter(([key]) => - key.endsWith(".Human"), - ), - ), - additionalProperties: false, - }, - }; - for (const key of Object.keys(props.parameters.$defs)) - if (key.endsWith(".Llm") === false && key.endsWith(".Human") === false) - delete props.parameters.$defs[key]; - if (Object.keys(output.llm.properties).length !== 0) { - const components: OpenApi.IComponents = {}; - output.validate = OpenApiValidator.create({ - components, - schema: invert({ - components, - schema: output.llm, - $defs: output.llm.$defs, - }), - required: true, - equals: props.equals, - }); - } - return output; - }; - - const separateStation = (props: { - predicate: (schema: IGeminiSchema) => boolean; - 
convention: (key: string, type: "llm" | "human") => string; - $defs: Record; - schema: IGeminiSchema; - }): [IGeminiSchema | null, IGeminiSchema | null] => { - if (props.predicate(props.schema) === true) return [null, props.schema]; - else if ( - GeminiTypeChecker.isUnknown(props.schema) || - GeminiTypeChecker.isAnyOf(props.schema) - ) - return [props.schema, null]; - else if (GeminiTypeChecker.isObject(props.schema)) - return separateObject({ - predicate: props.predicate, - convention: props.convention, - $defs: props.$defs, - schema: props.schema, - }); - else if (GeminiTypeChecker.isArray(props.schema)) - return separateArray({ - predicate: props.predicate, - convention: props.convention, - $defs: props.$defs, - schema: props.schema, - }); - else if (GeminiTypeChecker.isReference(props.schema)) - return separateReference({ - predicate: props.predicate, - convention: props.convention, - $defs: props.$defs, - schema: props.schema, - }); - return [props.schema, null]; - }; - - const separateArray = (props: { - predicate: (schema: IGeminiSchema) => boolean; - convention: (key: string, type: "llm" | "human") => string; - $defs: Record; - schema: IGeminiSchema.IArray; - }): [IGeminiSchema.IArray | null, IGeminiSchema.IArray | null] => { - const [x, y] = separateStation({ - predicate: props.predicate, - convention: props.convention, - $defs: props.$defs, - schema: props.schema.items, - }); - return [ - x !== null - ? { - ...props.schema, - items: x, - } - : null, - y !== null - ? { - ...props.schema, - items: y, - } - : null, - ]; - }; - - const separateObject = (props: { - $defs: Record; - predicate: (schema: IGeminiSchema) => boolean; - convention: (key: string, type: "llm" | "human") => string; - schema: IGeminiSchema.IObject; - }): [IGeminiSchema.IObject | null, IGeminiSchema.IObject | null] => { - // EMPTY OBJECT - if ( - Object.keys(props.schema.properties ?? 
{}).length === 0 && - !!props.schema.additionalProperties === false - ) - return [props.schema, null]; - - const llm = { - ...props.schema, - properties: {} as Record, - additionalProperties: props.schema.additionalProperties, - } satisfies IGeminiSchema.IObject; - const human = { - ...props.schema, - properties: {} as Record, - } satisfies IGeminiSchema.IObject; - - for (const [key, value] of Object.entries(props.schema.properties ?? {})) { - const [x, y] = separateStation({ - predicate: props.predicate, - convention: props.convention, - $defs: props.$defs, - schema: value, - }); - if (x !== null) llm.properties[key] = x; - if (y !== null) human.properties[key] = y; - } - if ( - typeof props.schema.additionalProperties === "object" && - props.schema.additionalProperties !== null - ) { - const [dx, dy] = separateStation({ - predicate: props.predicate, - convention: props.convention, - $defs: props.$defs, - schema: props.schema.additionalProperties, - }); - llm.additionalProperties = dx ?? false; - human.additionalProperties = dy ?? false; - } - return [ - !!Object.keys(llm.properties).length || !!llm.additionalProperties - ? shrinkRequired(llm) - : null, - !!Object.keys(human.properties).length || human.additionalProperties - ? shrinkRequired(human) - : null, - ]; - }; - - const separateReference = (props: { - predicate: (schema: IGeminiSchema) => boolean; - convention: (key: string, type: "llm" | "human") => string; - $defs: Record; - schema: IGeminiSchema.IReference; - }): [IGeminiSchema.IReference | null, IGeminiSchema.IReference | null] => { - const key: string = props.schema.$ref.split("#/$defs/")[1]; - const humanKey: string = props.convention(key, "human"); - const llmKey: string = props.convention(key, "llm"); - - // FIND EXISTING - if (props.$defs?.[humanKey] || props.$defs?.[llmKey]) - return [ - props.$defs?.[llmKey] - ? { - ...props.schema, - $ref: `#/$defs/${llmKey}`, - } - : null, - props.$defs?.[humanKey] - ? 
{ - ...props.schema, - $ref: `#/$defs/${humanKey}`, - } - : null, - ]; - - // PRE-ASSIGNMENT - props.$defs![llmKey] = {}; - props.$defs![humanKey] = {}; - - // DO COMPOSE - const schema: IGeminiSchema = props.$defs?.[key]!; - const [llm, human] = separateStation({ - predicate: props.predicate, - convention: props.convention, - $defs: props.$defs, - schema, - }); - if (llm !== null) Object.assign(props.$defs[llmKey], llm); - if (human !== null) Object.assign(props.$defs[humanKey], human); - - // ONLY ONE - if (llm === null || human === null) { - delete props.$defs[llmKey]; - delete props.$defs[humanKey]; - return llm === null ? [null, props.schema] : [props.schema, null]; - } - - // BOTH OF THEM - return [ - llm !== null - ? { - ...props.schema, - $ref: `#/$defs/${llmKey}`, - } - : null, - human !== null - ? { - ...props.schema, - $ref: `#/$defs/${humanKey}`, - } - : null, - ]; - }; - - const shrinkRequired = (s: IGeminiSchema.IObject): IGeminiSchema.IObject => { - s.required = s.required.filter((key) => s.properties?.[key] !== undefined); - return s; - }; - - /* ----------------------------------------------------------- - INVERTERS - ----------------------------------------------------------- */ - export const invert = (props: { - components: OpenApi.IComponents; - schema: IGeminiSchema; - $defs: Record; - }): OpenApi.IJsonSchema => { - const union: OpenApi.IJsonSchema[] = []; - const attribute: IJsonSchemaAttribute = { - title: props.schema.title, - description: props.schema.description, - deprecated: props.schema.deprecated, - readOnly: props.schema.readOnly, - writeOnly: props.schema.writeOnly, - example: props.schema.example, - examples: props.schema.examples, - ...Object.fromEntries( - Object.entries(props.schema).filter( - ([key, value]) => key.startsWith("x-") && value !== undefined, - ), - ), - }; - - const next = (schema: IGeminiSchema): OpenApi.IJsonSchema => - invert({ - components: props.components, - $defs: props.$defs, - schema, - }); - const visit = 
(schema: IGeminiSchema): void => { - if (GeminiTypeChecker.isArray(schema)) - union.push({ - ...schema, - ...LlmDescriptionInverter.array(schema.description), - items: next(schema.items), - }); - else if (GeminiTypeChecker.isObject(schema)) - union.push({ - ...schema, - properties: Object.fromEntries( - Object.entries(schema.properties).map(([key, value]) => [ - key, - next(value), - ]), - ), - additionalProperties: - typeof schema.additionalProperties === "object" && - schema.additionalProperties !== null - ? next(schema.additionalProperties) - : schema.additionalProperties, - }); - else if (GeminiTypeChecker.isAnyOf(schema)) schema.anyOf.forEach(visit); - else if (GeminiTypeChecker.isReference(schema)) { - const key: string = schema.$ref.split("#/$defs/")[1]; - if (props.components.schemas?.[key] === undefined) { - props.components.schemas ??= {}; - props.components.schemas[key] = {}; - props.components.schemas[key] = next(props.$defs[key] ?? {}); - } - union.push({ - ...schema, - $ref: `#/components/schemas/${key}`, - }); - } else if (GeminiTypeChecker.isBoolean(schema)) - if (!!schema.enum?.length) - schema.enum.forEach((v) => - union.push({ - const: v, - }), - ); - else union.push(schema); - else if ( - GeminiTypeChecker.isInteger(schema) || - GeminiTypeChecker.isNumber(schema) - ) - if (!!schema.enum?.length) - schema.enum.forEach((v) => - union.push({ - const: v, - }), - ); - else - union.push({ - ...schema, - ...LlmDescriptionInverter.numeric(schema.description), - ...{ enum: undefined }, - }); - else if (GeminiTypeChecker.isString(schema)) - if (!!schema.enum?.length) - schema.enum.forEach((v) => - union.push({ - const: v, - }), - ); - else - union.push({ - ...schema, - ...LlmDescriptionInverter.string(schema.description), - ...{ enum: undefined }, - }); - else - union.push({ - ...schema, - }); - }; - visit(props.schema); - - return { - ...attribute, - ...(union.length === 0 - ? { type: undefined } - : union.length === 1 - ? 
{ ...union[0] } - : { - oneOf: union.map((u) => ({ ...u, nullable: undefined })), - discriminator: - GeminiTypeChecker.isAnyOf(props.schema) && - props.schema["x-discriminator"] !== undefined - ? { - property: props.schema["x-discriminator"], - mapping: - props.schema["x-discriminator"].mapping !== undefined - ? Object.fromEntries( - Object.entries( - props.schema["x-discriminator"].mapping, - ).map(([key, value]) => [ - key, - `#/components/schemas/${value.split("/").at(-1)}`, - ]), - ) - : undefined, - } - : undefined, - }), - } satisfies OpenApi.IJsonSchema; - }; -} diff --git a/src/composers/llm/LlmSchemaV3Composer.ts b/src/composers/llm/LlmSchemaV3Composer.ts deleted file mode 100644 index bf395bbe..00000000 --- a/src/composers/llm/LlmSchemaV3Composer.ts +++ /dev/null @@ -1,340 +0,0 @@ -import { OpenApi } from "../../OpenApi"; -import { OpenApiV3Downgrader } from "../../converters/OpenApiV3Downgrader"; -import { OpenApiV3Upgrader } from "../../converters/OpenApiV3Upgrader"; -import { ILlmFunction } from "../../structures/ILlmFunction"; -import { ILlmSchemaV3 } from "../../structures/ILlmSchemaV3"; -import { IOpenApiSchemaError } from "../../structures/IOpenApiSchemaError"; -import { IResult } from "../../structures/IResult"; -import { LlmTypeCheckerV3 } from "../../utils/LlmTypeCheckerV3"; -import { OpenApiConstraintShifter } from "../../utils/OpenApiConstraintShifter"; -import { OpenApiTypeChecker } from "../../utils/OpenApiTypeChecker"; -import { OpenApiValidator } from "../../utils/OpenApiValidator"; -import { LlmDescriptionInverter } from "./LlmDescriptionInverter"; -import { LlmParametersFinder } from "./LlmParametersComposer"; - -export namespace LlmSchemaV3Composer { - /** @internal */ - export const IS_DEFS = false; - - export const DEFAULT_CONFIG: ILlmSchemaV3.IConfig = { - recursive: 3, - constraint: true, - }; - - /* ----------------------------------------------------------- - CONVERTERS - ----------------------------------------------------------- 
*/ - export const parameters = (props: { - config: ILlmSchemaV3.IConfig; - components: OpenApi.IComponents; - schema: OpenApi.IJsonSchema.IObject | OpenApi.IJsonSchema.IReference; - /** @internal */ - validate?: ( - schema: OpenApi.IJsonSchema, - accessor: string, - ) => IOpenApiSchemaError.IReason[]; - accessor?: string; - refAccessor?: string; - }): IResult => { - const entity: IResult = - LlmParametersFinder.parameters({ - ...props, - method: "LlmSchemaV3Composer.parameters", - }); - if (entity.success === false) return entity; - - const result: IResult = schema(props); - if (result.success === false) return result; - return { - success: true, - value: { - ...(result.value as ILlmSchemaV3.IObject), - additionalProperties: false, - } satisfies ILlmSchemaV3.IParameters, - }; - }; - - export const schema = (props: { - config: ILlmSchemaV3.IConfig; - components: OpenApi.IComponents; - schema: OpenApi.IJsonSchema; - /** @internal */ - validate?: ( - schema: OpenApi.IJsonSchema, - accessor: string, - ) => IOpenApiSchemaError.IReason[]; - accessor?: string; - refAccessor?: string; - }): IResult => { - // CHECK TUPLE TYPE - const reasons: IOpenApiSchemaError.IReason[] = []; - OpenApiTypeChecker.visit({ - closure: (next, accessor) => { - if (props.validate) reasons.push(...props.validate(next, accessor)); - if (OpenApiTypeChecker.isTuple(next)) - reasons.push({ - accessor: accessor, - schema: next, - message: "LLM does not allow tuple type.", - }); - else if (OpenApiTypeChecker.isReference(next)) { - // UNABLE TO FIND MATCHED REFERENCE - const key = next.$ref.split("#/components/schemas/")[1]; - if (props.components.schemas?.[key] === undefined) { - reasons.push({ - schema: next, - message: `${accessor}: unable to find reference type ${JSON.stringify(key)}.`, - accessor: accessor, - }); - } - } - }, - components: props.components, - schema: props.schema, - accessor: props.accessor, - refAccessor: props.refAccessor, - }); - // if ((valid as boolean) === false) return 
null; - if (reasons.length > 0) - return { - success: false, - error: { - method: "LlmSchemaV3Composer.schema", - message: "Failed to compose LLM schema of v3", - reasons, - }, - }; - - // CHECK MISMATCHES - const escaped: IResult = - OpenApiTypeChecker.escape({ - ...props, - recursive: props.config.recursive, - }); - if (escaped.success === false) - // UNREACHABLE - return { - success: false, - error: { - method: "LlmSchemaV3Composer.schema", - message: "Failed to compose LLM schema of v3", - reasons: escaped.error.reasons, - }, - }; - - // SPECIALIZATIONS - const downgraded: ILlmSchemaV3 = OpenApiV3Downgrader.downgradeSchema({ - original: { - schemas: {}, - }, - downgraded: {}, - })(escaped.value) as ILlmSchemaV3; - LlmTypeCheckerV3.visit({ - closure: (next) => { - if ( - LlmTypeCheckerV3.isOneOf(next) && - (next as any).discriminator !== undefined - ) - delete (next as any).discriminator; - else if (LlmTypeCheckerV3.isObject(next)) { - next.properties ??= {}; - next.required ??= []; - } - if (props.config.constraint === false) { - if ( - LlmTypeCheckerV3.isInteger(next) || - LlmTypeCheckerV3.isNumber(next) - ) - OpenApiConstraintShifter.shiftNumeric( - next as - | OpenApi.IJsonSchema.IInteger - | OpenApi.IJsonSchema.INumber, - ); - else if (LlmTypeCheckerV3.isString(next)) - OpenApiConstraintShifter.shiftString( - next as OpenApi.IJsonSchema.IString, - ); - else if (LlmTypeCheckerV3.isArray(next)) - OpenApiConstraintShifter.shiftArray( - next as OpenApi.IJsonSchema.IArray, - ); - } - }, - schema: downgraded, - }); - return { - success: true, - value: downgraded, - }; - }; - - /* ----------------------------------------------------------- - SEPARATORS - ----------------------------------------------------------- */ - export const separateParameters = (props: { - predicate: (schema: ILlmSchemaV3) => boolean; - parameters: ILlmSchemaV3.IParameters; - equals?: boolean; - }): ILlmFunction.ISeparated<"3.0"> => { - const [llm, human] = separateObject({ - predicate: 
props.predicate, - schema: props.parameters, - }); - return { - llm: (llm as ILlmSchemaV3.IParameters | null) ?? { - type: "object", - properties: {}, - additionalProperties: false, - required: [], - }, - human: human as ILlmSchemaV3.IParameters | null, - validate: llm - ? OpenApiValidator.create({ - components: {}, - schema: invert({ schema: llm }), - required: true, - equals: props.equals, - }) - : undefined, - }; - }; - - const separateStation = (props: { - predicate: (schema: ILlmSchemaV3) => boolean; - schema: ILlmSchemaV3; - }): [ILlmSchemaV3 | null, ILlmSchemaV3 | null] => { - if (props.predicate(props.schema) === true) return [null, props.schema]; - else if ( - LlmTypeCheckerV3.isUnknown(props.schema) || - LlmTypeCheckerV3.isOneOf(props.schema) - ) - return [props.schema, null]; - else if (LlmTypeCheckerV3.isObject(props.schema)) - return separateObject({ - predicate: props.predicate, - schema: props.schema, - }); - else if (LlmTypeCheckerV3.isArray(props.schema)) - return separateArray({ - predicate: props.predicate, - schema: props.schema, - }); - return [props.schema, null]; - }; - - const separateArray = (props: { - predicate: (schema: ILlmSchemaV3) => boolean; - schema: ILlmSchemaV3.IArray; - }): [ILlmSchemaV3.IArray | null, ILlmSchemaV3.IArray | null] => { - const [x, y] = separateStation({ - predicate: props.predicate, - schema: props.schema.items, - }); - return [ - x !== null - ? { - ...props.schema, - items: x, - } - : null, - y !== null - ? { - ...props.schema, - items: y, - } - : null, - ]; - }; - - const separateObject = (props: { - predicate: (schema: ILlmSchemaV3) => boolean; - schema: ILlmSchemaV3.IObject; - }): [ILlmSchemaV3.IObject | null, ILlmSchemaV3.IObject | null] => { - // EMPTY OBJECT - if ( - Object.keys(props.schema.properties ?? 
{}).length === 0 && - !!props.schema.additionalProperties === false - ) - return [props.schema, null]; - - const llm = { - ...props.schema, - properties: {} as Record, - additionalProperties: props.schema.additionalProperties, - } satisfies ILlmSchemaV3.IObject; - const human = { - ...props.schema, - properties: {} as Record, - additionalProperties: props.schema.additionalProperties, - } satisfies ILlmSchemaV3.IObject; - - for (const [key, value] of Object.entries(props.schema.properties ?? {})) { - const [x, y] = separateStation({ - predicate: props.predicate, - schema: value, - }); - if (x !== null) llm.properties[key] = x; - if (y !== null) human.properties[key] = y; - } - if ( - typeof props.schema.additionalProperties === "object" && - props.schema.additionalProperties !== null - ) { - const [dx, dy] = separateStation({ - predicate: props.predicate, - schema: props.schema.additionalProperties, - }); - llm.additionalProperties = dx ?? false; - human.additionalProperties = dy ?? false; - } - return [ - !!Object.keys(llm.properties).length || !!llm.additionalProperties - ? shrinkRequired(llm) - : null, - !!Object.keys(human.properties).length || !!human.additionalProperties - ? 
shrinkRequired(human) - : null, - ]; - }; - - const shrinkRequired = (s: ILlmSchemaV3.IObject): ILlmSchemaV3.IObject => { - s.required = s.required.filter((key) => s.properties[key] !== undefined); - return s; - }; - - /* ----------------------------------------------------------- - INVERTERS - ----------------------------------------------------------- */ - export const invert = (props: { - schema: ILlmSchemaV3; - }): OpenApi.IJsonSchema => { - const upgraded: OpenApi.IJsonSchema = OpenApiV3Upgrader.convertSchema({})( - props.schema, - ); - OpenApiTypeChecker.visit({ - closure: (schema) => { - if (OpenApiTypeChecker.isArray(schema)) - Object.assign(schema, { - ...schema, - ...LlmDescriptionInverter.array(schema.description), - }); - else if ( - OpenApiTypeChecker.isInteger(schema) || - OpenApiTypeChecker.isNumber(schema) - ) - Object.assign(schema, { - ...schema, - ...LlmDescriptionInverter.numeric(schema.description), - }); - else if (OpenApiTypeChecker.isString(schema)) - Object.assign(schema, { - ...schema, - ...LlmDescriptionInverter.string(schema.description), - }); - }, - components: {}, - schema: upgraded, - }); - return upgraded; - }; -} diff --git a/src/composers/llm/LlmSchemaV3_1Composer.ts b/src/composers/llm/LlmSchemaV3_1Composer.ts deleted file mode 100644 index cda921ee..00000000 --- a/src/composers/llm/LlmSchemaV3_1Composer.ts +++ /dev/null @@ -1,716 +0,0 @@ -import { OpenApi } from "../../OpenApi"; -import { IJsonSchemaAttribute } from "../../structures/IJsonSchemaAttribute"; -import { ILlmFunction } from "../../structures/ILlmFunction"; -import { ILlmSchemaV3_1 } from "../../structures/ILlmSchemaV3_1"; -import { IOpenApiSchemaError } from "../../structures/IOpenApiSchemaError"; -import { IResult } from "../../structures/IResult"; -import { LlmTypeCheckerV3_1 } from "../../utils/LlmTypeCheckerV3_1"; -import { NamingConvention } from "../../utils/NamingConvention"; -import { OpenApiConstraintShifter } from "../../utils/OpenApiConstraintShifter"; 
-import { OpenApiTypeChecker } from "../../utils/OpenApiTypeChecker"; -import { OpenApiValidator } from "../../utils/OpenApiValidator"; -import { JsonDescriptionUtil } from "../../utils/internal/JsonDescriptionUtil"; -import { LlmDescriptionInverter } from "./LlmDescriptionInverter"; -import { LlmParametersFinder } from "./LlmParametersComposer"; - -export namespace LlmSchemaV3_1Composer { - /** @internal */ - export const IS_DEFS = true; - - export const DEFAULT_CONFIG: ILlmSchemaV3_1.IConfig = { - reference: true, - constraint: true, - }; - - /* ----------------------------------------------------------- - CONVERTERS - ----------------------------------------------------------- */ - export const parameters = (props: { - config: ILlmSchemaV3_1.IConfig; - components: OpenApi.IComponents; - schema: OpenApi.IJsonSchema.IObject | OpenApi.IJsonSchema.IReference; - errors?: string[]; - /** @internal */ - validate?: ( - input: OpenApi.IJsonSchema, - accessor: string, - ) => IOpenApiSchemaError.IReason[]; - accessor?: string; - refAccessor?: string; - }): IResult => { - const entity: IResult = - LlmParametersFinder.parameters({ - ...props, - method: "LlmSchemaV3_1Composer.parameters", - }); - if (entity.success === false) return entity; - - const $defs: Record = {}; - const result: IResult = schema({ - ...props, - $defs, - schema: entity.value, - }); - if (result.success === false) return result; - return { - success: true, - value: { - ...(result.value as ILlmSchemaV3_1.IObject), - additionalProperties: false, - $defs, - description: OpenApiTypeChecker.isReference(props.schema) - ? 
JsonDescriptionUtil.cascade({ - prefix: "#/components/schemas/", - components: props.components, - schema: props.schema, - escape: true, - }) - : result.value.description, - } satisfies ILlmSchemaV3_1.IParameters, - }; - }; - - export const schema = (props: { - config: ILlmSchemaV3_1.IConfig; - components: OpenApi.IComponents; - $defs: Record; - schema: OpenApi.IJsonSchema; - /** @internal */ - validate?: ( - input: OpenApi.IJsonSchema, - accessor: string, - ) => IOpenApiSchemaError.IReason[]; - accessor?: string; - refAccessor?: string; - }): IResult => { - const union: Array = []; - const attribute: IJsonSchemaAttribute = { - title: props.schema.title, - description: props.schema.description, - deprecated: props.schema.deprecated, - readOnly: props.schema.readOnly, - writeOnly: props.schema.writeOnly, - example: props.schema.example, - examples: props.schema.examples, - ...Object.fromEntries( - Object.entries(props.schema).filter( - ([key, value]) => key.startsWith("x-") && value !== undefined, - ), - ), - }; - - const reasons: IOpenApiSchemaError.IReason[] = []; - OpenApiTypeChecker.visit({ - closure: (next, accessor) => { - if (props.validate) { - // CUSTOM VALIDATION - reasons.push(...props.validate(next, accessor)); - } - if (OpenApiTypeChecker.isTuple(next)) - reasons.push({ - schema: next, - accessor: accessor, - message: `LLM does not allow tuple type.`, - }); - else if (OpenApiTypeChecker.isReference(next)) { - // UNABLE TO FIND MATCHED REFERENCE - const key = next.$ref.split("#/components/schemas/")[1]; - if (props.components.schemas?.[key] === undefined) - reasons.push({ - schema: next, - accessor: accessor, - message: `unable to find reference type ${JSON.stringify(key)}.`, - }); - } - }, - components: props.components, - schema: props.schema, - accessor: props.accessor, - refAccessor: props.refAccessor, - }); - if (reasons.length > 0) - return { - success: false, - error: { - method: "LlmSchemaV3_1Composer.schema", - message: "Failed to compose LLM 
schema of v3.1", - reasons, - }, - }; - - const visit = (input: OpenApi.IJsonSchema, accessor: string): number => { - if (OpenApiTypeChecker.isOneOf(input)) { - // UNION TYPE - input.oneOf.forEach((s, i) => visit(s, `${accessor}.oneOf[${i}]`)); - return 0; - } else if (OpenApiTypeChecker.isReference(input)) { - // REFERENCE TYPE - const key: string = input.$ref.split("#/components/schemas/")[1]; - const target: OpenApi.IJsonSchema | undefined = - props.components.schemas?.[key]; - if (target === undefined) - return union.push(null); // UNREACHABLEE - else if ( - // KEEP THE REFERENCE TYPE - props.config.reference === true || - OpenApiTypeChecker.isRecursiveReference({ - components: props.components, - schema: input, - }) - ) { - const out = () => - union.push({ - ...input, - $ref: `#/$defs/${key}`, - }); - if (props.$defs[key] !== undefined) return out(); - props.$defs[key] = {}; - const converted: IResult = - schema({ - config: props.config, - components: props.components, - $defs: props.$defs, - schema: target, - refAccessor: props.refAccessor, - accessor: `${props.refAccessor ?? "$def"}[${JSON.stringify(key)}]`, - }); - if (converted.success === false) return union.push(null); // UNREACHABLE - props.$defs[key] = converted.value; - return out(); - } else { - // DISCARD THE REFERENCE TYPE - const length: number = union.length; - visit(target, accessor); - if (length === union.length - 1 && union[union.length - 1] !== null) - union[union.length - 1] = { - ...union[union.length - 1]!, - description: JsonDescriptionUtil.cascade({ - prefix: "#/components/schemas/", - components: props.components, - schema: input, - escape: true, - }), - }; - else - attribute.description = JsonDescriptionUtil.cascade({ - prefix: "#/components/schemas/", - components: props.components, - schema: input, - escape: true, - }); - return union.length; - } - } else if (OpenApiTypeChecker.isObject(input)) { - // OBJECT TYPE - const properties: Record = - Object.entries(input.properties ?? 
{}).reduce( - (acc, [key, value]) => { - const converted: IResult = - schema({ - config: props.config, - components: props.components, - $defs: props.$defs, - schema: value, - refAccessor: props.refAccessor, - accessor: `${accessor}.properties[${JSON.stringify(key)}]`, - }); - acc[key] = converted.success ? converted.value : null; - if (converted.success === false) - reasons.push(...converted.error.reasons); - return acc; - }, - {} as Record, - ); - if (Object.values(properties).some((v) => v === null)) - return union.push(null); - const additionalProperties: - | ILlmSchemaV3_1 - | boolean - | null - | undefined = (() => { - if ( - typeof input.additionalProperties === "object" && - input.additionalProperties !== null - ) { - const converted: IResult = - schema({ - config: props.config, - components: props.components, - $defs: props.$defs, - schema: input.additionalProperties, - refAccessor: props.refAccessor, - accessor: `${accessor}.additionalProperties`, - }); - if (converted.success === false) { - reasons.push(...converted.error.reasons); - return null; - } - return converted.value; - } - return input.additionalProperties; - })(); - if (additionalProperties === null) return union.push(null); - return union.push({ - ...input, - properties: properties as Record, - additionalProperties, - required: input.required ?? [], - }); - } else if (OpenApiTypeChecker.isArray(input)) { - const items: IResult = schema({ - config: props.config, - components: props.components, - $defs: props.$defs, - schema: input.items, - refAccessor: props.refAccessor, - accessor: `${accessor}.items`, - }); - if (items.success === false) { - reasons.push(...items.error.reasons); - return union.push(null); - } - return union.push( - (props.config.constraint - ? 
(x: ILlmSchemaV3_1.IArray) => x - : (x: ILlmSchemaV3_1.IArray) => - OpenApiConstraintShifter.shiftArray(x))({ - ...input, - items: items.value, - }), - ); - } else if (OpenApiTypeChecker.isString(input)) - return union.push( - (props.config.constraint - ? (x: ILlmSchemaV3_1.IString) => x - : (x: ILlmSchemaV3_1.IString) => - OpenApiConstraintShifter.shiftString(x))({ - ...input, - }), - ); - else if ( - OpenApiTypeChecker.isNumber(input) || - OpenApiTypeChecker.isInteger(input) - ) - return union.push( - (props.config.constraint - ? (x: ILlmSchemaV3_1.INumber | ILlmSchemaV3_1.IInteger) => x - : (x: ILlmSchemaV3_1.INumber | ILlmSchemaV3_1.IInteger) => - OpenApiConstraintShifter.shiftNumeric(x))({ - ...input, - }), - ); - else if (OpenApiTypeChecker.isTuple(input)) - return union.push(null); // UNREACHABLE - else return union.push({ ...input }); - }; - visit(props.schema, props.accessor ?? "$input.schema"); - - if (union.some((u) => u === null)) - return { - success: false, - error: { - method: "LlmSchemaV3_1Composer.schema", - message: "Failed to compose LLM schema of v3.1", - reasons, - }, - }; - else if (union.length === 0) - return { - success: true, - value: { - ...attribute, - type: undefined, - }, - }; - else if (union.length === 1) - return { - success: true, - value: { - ...attribute, - ...union[0]!, - }, - }; - return { - success: true, - value: { - ...attribute, - oneOf: union.filter((u) => u !== null), - discriminator: - OpenApiTypeChecker.isOneOf(props.schema) && - props.schema.discriminator !== undefined && - union - .filter((u) => u !== null) - .every( - (e) => - LlmTypeCheckerV3_1.isReference(e) || - LlmTypeCheckerV3_1.isNull(e), - ) - ? { - propertyName: props.schema.discriminator.propertyName, - mapping: - props.schema.discriminator.mapping !== undefined - ? 
Object.fromEntries( - Object.entries(props.schema.discriminator.mapping).map( - ([key, value]) => [ - key, - `#/$defs/${value.split("/").at(-1)}`, - ], - ), - ) - : undefined, - } - : undefined, - }, - }; - }; - - /* ----------------------------------------------------------- - SEPARATORS - ----------------------------------------------------------- */ - export const separateParameters = (props: { - parameters: ILlmSchemaV3_1.IParameters; - predicate: (schema: ILlmSchemaV3_1) => boolean; - convention?: (key: string, type: "llm" | "human") => string; - equals?: boolean; - }): ILlmFunction.ISeparated<"3.1"> => { - const convention = - props.convention ?? - ((key, type) => `${key}.${NamingConvention.capitalize(type)}`); - const [llm, human] = separateObject({ - $defs: props.parameters.$defs, - schema: props.parameters, - predicate: props.predicate, - convention, - }); - if (llm === null || human === null) - return { - llm: (llm as ILlmSchemaV3_1.IParameters | null) ?? { - type: "object", - properties: {}, - additionalProperties: false, - required: [], - $defs: {}, - }, - human: human as ILlmSchemaV3_1.IParameters | null, - }; - const output: ILlmFunction.ISeparated<"3.1"> = { - llm: { - ...llm, - $defs: Object.fromEntries( - Object.entries(props.parameters.$defs).filter(([key]) => - key.endsWith(".Llm"), - ), - ), - additionalProperties: false, - }, - human: { - ...human, - $defs: Object.fromEntries( - Object.entries(props.parameters.$defs).filter(([key]) => - key.endsWith(".Human"), - ), - ), - additionalProperties: false, - }, - }; - for (const key of Object.keys(props.parameters.$defs)) - if (key.endsWith(".Llm") === false && key.endsWith(".Human") === false) - delete props.parameters.$defs[key]; - if (Object.keys(output.llm.properties).length !== 0) { - const components: OpenApi.IComponents = {}; - output.validate = OpenApiValidator.create({ - components, - schema: invert({ - components, - schema: output.llm, - $defs: output.llm.$defs, - }), - required: true, - 
equals: props.equals, - }); - } - return output; - }; - - const separateStation = (props: { - predicate: (schema: ILlmSchemaV3_1) => boolean; - convention: (key: string, type: "llm" | "human") => string; - $defs: Record; - schema: ILlmSchemaV3_1; - }): [ILlmSchemaV3_1 | null, ILlmSchemaV3_1 | null] => { - if (props.predicate(props.schema) === true) return [null, props.schema]; - else if ( - LlmTypeCheckerV3_1.isUnknown(props.schema) || - LlmTypeCheckerV3_1.isOneOf(props.schema) - ) - return [props.schema, null]; - else if (LlmTypeCheckerV3_1.isObject(props.schema)) - return separateObject({ - predicate: props.predicate, - convention: props.convention, - $defs: props.$defs, - schema: props.schema, - }); - else if (LlmTypeCheckerV3_1.isArray(props.schema)) - return separateArray({ - predicate: props.predicate, - convention: props.convention, - $defs: props.$defs, - schema: props.schema, - }); - else if (LlmTypeCheckerV3_1.isReference(props.schema)) - return separateReference({ - predicate: props.predicate, - convention: props.convention, - $defs: props.$defs, - schema: props.schema, - }); - return [props.schema, null]; - }; - - const separateArray = (props: { - predicate: (schema: ILlmSchemaV3_1) => boolean; - convention: (key: string, type: "llm" | "human") => string; - $defs: Record; - schema: ILlmSchemaV3_1.IArray; - }): [ILlmSchemaV3_1.IArray | null, ILlmSchemaV3_1.IArray | null] => { - const [x, y] = separateStation({ - predicate: props.predicate, - convention: props.convention, - $defs: props.$defs, - schema: props.schema.items, - }); - return [ - x !== null - ? { - ...props.schema, - items: x, - } - : null, - y !== null - ? 
{ - ...props.schema, - items: y, - } - : null, - ]; - }; - - const separateObject = (props: { - predicate: (schema: ILlmSchemaV3_1) => boolean; - convention: (key: string, type: "llm" | "human") => string; - $defs: Record; - schema: ILlmSchemaV3_1.IObject; - }): [ILlmSchemaV3_1.IObject | null, ILlmSchemaV3_1.IObject | null] => { - // EMPTY OBJECT - if ( - Object.keys(props.schema.properties ?? {}).length === 0 && - !!props.schema.additionalProperties === false - ) - return [props.schema, null]; - - const llm = { - ...props.schema, - properties: {} as Record, - additionalProperties: props.schema.additionalProperties, - } satisfies ILlmSchemaV3_1.IObject; - const human = { - ...props.schema, - properties: {} as Record, - } satisfies ILlmSchemaV3_1.IObject; - - for (const [key, value] of Object.entries(props.schema.properties ?? {})) { - const [x, y] = separateStation({ - predicate: props.predicate, - convention: props.convention, - $defs: props.$defs, - schema: value, - }); - if (x !== null) llm.properties[key] = x; - if (y !== null) human.properties[key] = y; - } - if ( - typeof props.schema.additionalProperties === "object" && - props.schema.additionalProperties !== null - ) { - const [dx, dy] = separateStation({ - predicate: props.predicate, - convention: props.convention, - $defs: props.$defs, - schema: props.schema.additionalProperties, - }); - llm.additionalProperties = dx ?? false; - human.additionalProperties = dy ?? false; - } - return [ - !!Object.keys(llm.properties).length || !!llm.additionalProperties - ? shrinkRequired(llm) - : null, - !!Object.keys(human.properties).length || human.additionalProperties - ? 
shrinkRequired(human) - : null, - ]; - }; - - const separateReference = (props: { - predicate: (schema: ILlmSchemaV3_1) => boolean; - convention: (key: string, type: "llm" | "human") => string; - $defs: Record; - schema: ILlmSchemaV3_1.IReference; - }): [ILlmSchemaV3_1.IReference | null, ILlmSchemaV3_1.IReference | null] => { - const key: string = props.schema.$ref.split("#/$defs/")[1]; - const humanKey: string = props.convention(key, "human"); - const llmKey: string = props.convention(key, "llm"); - - // FIND EXISTING - if (props.$defs?.[humanKey] || props.$defs?.[llmKey]) - return [ - props.$defs?.[llmKey] - ? { - ...props.schema, - $ref: `#/$defs/${llmKey}`, - } - : null, - props.$defs?.[humanKey] - ? { - ...props.schema, - $ref: `#/$defs/${humanKey}`, - } - : null, - ]; - - // PRE-ASSIGNMENT - props.$defs![llmKey] = {}; - props.$defs![humanKey] = {}; - - // DO COMPOSE - const schema: ILlmSchemaV3_1 = props.$defs?.[key]!; - const [llm, human] = separateStation({ - predicate: props.predicate, - convention: props.convention, - $defs: props.$defs, - schema, - }); - if (llm !== null) Object.assign(props.$defs[llmKey], llm); - if (human !== null) Object.assign(props.$defs[humanKey], human); - - // ONLY ONE - if (llm === null || human === null) { - delete props.$defs[llmKey]; - delete props.$defs[humanKey]; - return llm === null ? [null, props.schema] : [props.schema, null]; - } - - // BOTH OF THEM - return [ - llm !== null - ? { - ...props.schema, - $ref: `#/$defs/${llmKey}`, - } - : null, - human !== null - ? 
{ - ...props.schema, - $ref: `#/$defs/${humanKey}`, - } - : null, - ]; - }; - - const shrinkRequired = ( - s: ILlmSchemaV3_1.IObject, - ): ILlmSchemaV3_1.IObject => { - if (s.required !== undefined) - s.required = s.required.filter( - (key) => s.properties?.[key] !== undefined, - ); - return s; - }; - - /* ----------------------------------------------------------- - INVERTERS - ----------------------------------------------------------- */ - export const invert = (props: { - components: OpenApi.IComponents; - schema: ILlmSchemaV3_1; - $defs: Record; - }): OpenApi.IJsonSchema => { - const next = (schema: ILlmSchemaV3_1): OpenApi.IJsonSchema => - invert({ - components: props.components, - $defs: props.$defs, - schema, - }); - if (LlmTypeCheckerV3_1.isArray(props.schema)) - return { - ...props.schema, - ...LlmDescriptionInverter.array(props.schema.description), - items: next(props.schema.items), - }; - else if (LlmTypeCheckerV3_1.isObject(props.schema)) - return { - ...props.schema, - properties: props.schema.properties - ? Object.fromEntries( - Object.entries(props.schema.properties).map(([key, value]) => [ - key, - next(value), - ]), - ) - : undefined, - additionalProperties: - typeof props.schema.additionalProperties === "object" && - props.schema.additionalProperties !== null - ? next(props.schema.additionalProperties) - : props.schema.additionalProperties, - }; - else if (LlmTypeCheckerV3_1.isReference(props.schema)) { - const key: string = props.schema.$ref.split("#/$defs/").at(-1) ?? ""; - if (props.components.schemas?.[key] === undefined) { - props.components.schemas ??= {}; - props.components.schemas[key] = {}; - props.components.schemas[key] = next(props.$defs[key] ?? {}); - } - return { - ...props.schema, - $ref: `#/components/schemas/${key}`, - }; - } else if (LlmTypeCheckerV3_1.isOneOf(props.schema)) - return { - ...props.schema, - oneOf: props.schema.oneOf.map(next), - discriminator: - props.schema.discriminator !== undefined - ? 
{ - propertyName: props.schema.discriminator.propertyName, - mapping: - props.schema.discriminator.mapping !== undefined - ? Object.fromEntries( - Object.entries(props.schema.discriminator.mapping).map( - ([key, value]) => [ - key, - `#/components/schemas/${value.split("/").at(-1)}`, - ], - ), - ) - : undefined, - } - : undefined, - }; - else if ( - LlmTypeCheckerV3_1.isInteger(props.schema) || - LlmTypeCheckerV3_1.isNumber(props.schema) - ) - return { - ...props.schema, - ...LlmDescriptionInverter.numeric(props.schema.description), - }; - else if (LlmTypeCheckerV3_1.isString(props.schema)) - return { - ...props.schema, - ...LlmDescriptionInverter.string(props.schema.description), - }; - return props.schema; - }; -} diff --git a/src/http/HttpLlmFunctionFetcher.ts b/src/http/HttpLlmFunctionFetcher.ts index 9e91a533..25d400a5 100644 --- a/src/http/HttpLlmFunctionFetcher.ts +++ b/src/http/HttpLlmFunctionFetcher.ts @@ -2,25 +2,20 @@ import type { HttpLlm } from "../HttpLlm"; import type { HttpMigration } from "../HttpMigration"; import { IHttpMigrateRoute } from "../structures/IHttpMigrateRoute"; import { IHttpResponse } from "../structures/IHttpResponse"; -import { ILlmSchema } from "../structures/ILlmSchema"; import { HttpMigrateRouteFetcher } from "./HttpMigrateRouteFetcher"; export namespace HttpLlmFunctionFetcher { - export const execute = ( - props: HttpLlm.IFetchProps, - ): Promise => - HttpMigrateRouteFetcher.execute(getFetchArguments("execute", props)); + export const execute = (props: HttpLlm.IFetchProps): Promise => + HttpMigrateRouteFetcher.execute(getFetchArguments("execute", props)); - export const propagate = ( - props: HttpLlm.IFetchProps, + export const propagate = ( + props: HttpLlm.IFetchProps, ): Promise => - HttpMigrateRouteFetcher.propagate( - getFetchArguments("propagate", props), - ); + HttpMigrateRouteFetcher.propagate(getFetchArguments("propagate", props)); - const getFetchArguments = ( + const getFetchArguments = ( from: string, - props: 
HttpLlm.IFetchProps, + props: HttpLlm.IFetchProps, ): HttpMigration.IFetchProps => { const route: IHttpMigrateRoute = props.function.route(); const input: Record = props.input; diff --git a/src/index.ts b/src/index.ts index 963e6a0c..367cce13 100644 --- a/src/index.ts +++ b/src/index.ts @@ -29,48 +29,25 @@ export * from "./HttpMigration"; //---- // LLM //---- -// VALIDATIONS -export * from "./structures/IOpenApiSchemaError"; -export * from "./structures/IResult"; -export * from "./structures/IValidation"; - // CONTROLLERS export * from "./structures/IHttpLlmController"; -export * from "./structures/ILlmController"; -export * from "./structures/IMcpLlmController"; - -// APPLICATIONS export * from "./structures/IHttpLlmApplication"; export * from "./structures/IHttpLlmFunction"; -export * from "./structures/ILlmApplication"; +export * from "./structures/ILlmController"; export * from "./structures/ILlmFunction"; +export * from "./structures/ILlmApplication"; +export * from "./structures/IMcpLlmApplication"; +export * from "./structures/IMcpLlmController"; +export * from "./structures/IMcpLlmFunction"; +export * from "./structures/IMcpTool"; -// SCHEMAS -export * from "./structures/IChatGptSchema"; -export * from "./structures/IClaudeSchema"; -export * from "./structures/IGeminiSchema"; +// SCHEMA export * from "./structures/ILlmSchema"; -export * from "./structures/ILlmSchemaV3"; -export * from "./structures/ILlmSchemaV3_1"; - -// TYPE CHECKERS -export * from "./utils/ChatGptTypeChecker"; -export * from "./utils/ClaudeTypeChecker"; -export * from "./utils/DeepSeekTypeChecker"; -export * from "./utils/GeminiTypeChecker"; -export * from "./utils/LlamaTypeChecker"; -export * from "./utils/LlmTypeCheckerV3"; -export * from "./utils/LlmTypeCheckerV3_1"; +export * from "./structures/IOpenApiSchemaError"; +export * from "./structures/IResult"; +export * from "./structures/IValidation"; // FACADE export * from "./HttpLlm"; - -//---- -// MCP -//---- export * from "./McpLlm"; 
- -export * from "./structures/IMcpLlmApplication"; -export * from "./structures/IMcpLlmFunction"; - -export * from "./structures/IMcpTool"; +export * from "./utils/LlmTypeChecker"; diff --git a/src/structures/IChatGptSchema.ts b/src/structures/IChatGptSchema.ts deleted file mode 100644 index 9f3f027d..00000000 --- a/src/structures/IChatGptSchema.ts +++ /dev/null @@ -1,342 +0,0 @@ -import { IJsonSchemaAttribute } from "./IJsonSchemaAttribute"; - -/** - * Type schema info for OpenAI function calling. - * - * `IChatGptSchema` is a type schema info for OpenAI function calling. The type - * name "ChatGpt" is intentionally used to avoid confusion with "OpenAPI" - * specification, even though this is designed for OpenAI models. - * - * `IChatGptSchema` basically follows the JSON schema definition of the OpenAPI - * v3.1 specification; {@link OpenApiV3_1.IJsonSchema}. However, it deviates from - * the standard JSON schema specification and omits many features when used in - * {@link IChatGptSchema.IConfig.strict} mode for OpenAI function calling. - * - * `IChatGptSchema` supports all JSON schema features through workaround - * expressions using JSDoc tags in the `description` property, so using - * `IChatGptSchema` does not degrade function calling performance even in strict - * mode. 
- * - * Here is the list of how `IChatGptSchema` is different with the OpenAPI v3.1 - * JSON schema: - * - * - Decompose mixed type: {@link OpenApiV3_1.IJsonSchema.IMixed} - * - Resolve nullable property: - * {@link OpenApiV3_1.IJsonSchema.__ISignificant.nullable} - * - Tuple type is banned: {@link OpenApiV3_1.IJsonSchema.ITuple.prefixItems} - * - Constant type is banned: {@link OpenApiV3_1.IJsonSchema.IConstant} - * - Merge {@link OpenApiV3_1.IJsonSchema.IOneOf} to {@link IChatGptSchema.IAnyOf} - * - Merge {@link OpenApiV3_1.IJsonSchema.IAllOf} to {@link IChatGptSchema.IObject} - * - Merge {@link OpenApiV3_1.IJsonSchema.IRecursiveReference} to - * {@link IChatGptSchema.IReference} - * - When {@link IChatGptSchema.IConfig.strict} mode: - * - * - Every object properties must be required - * - Do not allow {@link IChatGptSchema.IObject.additionalProperties} - * - * Compared to {@link OpenApi.IJsonSchema}, the emended JSON schema - * specification: - * - * - {@link IChatGptSchema.IAnyOf} instead of {@link OpenApi.IJsonSchema.IOneOf} - * - {@link IChatGptSchema.IParameters.$defs} instead of - * {@link OpenApi.IComponents.schemas} - * - {@link IChatGptSchema.IString.enum} instead of - * {@link OpenApi.IJsonSchema.IConstant} - * - {@link IChatGptSchema.additionalProperties} is fixed to `false` in strict mode - * - {@link IChatGptSchema.properties} and {@link IChatGptSchema.required} are - * always defined - * - No tuple type {@link OpenApi.IJsonSchema.ITuple} support - * - When {@link IChatGptSchema.IConfig.strict} mode: - * - * - Every object properties must be required - * - Do not allow {@link IChatGptSchema.IObject.additionalProperties} - * - * For reference, if you compose the `IChatGptSchema` type with the - * {@link IChatGptSchema.IConfig.reference} `false` option (default is `false`), - * only recursively named types are archived into the - * {@link IChatGptSchema.IParameters.$defs}, and others are escaped from the - * {@link IChatGptSchema.IReference} type. 
- * - * Also, OpenAI has banned the following constraint properties. Instead, - * `IChatGptSchema` fills the {@link IChatGptSchema.description} property with - * workaround expressions using JSDoc tags like `"@format uuid"` to convey these - * constraints: - * - * - {@link OpenApi.IJsonSchema.INumber.minimum} - * - {@link OpenApi.IJsonSchema.INumber.maximum} - * - {@link OpenApi.IJsonSchema.INumber.multipleOf} - * - {@link OpenApi.IJsonSchema.IString.minLength} - * - {@link OpenApi.IJsonSchema.IString.maxLength} - * - {@link OpenApi.IJsonSchema.IString.format} - * - {@link OpenApi.IJsonSchema.IString.pattern} - * - {@link OpenApi.IJsonSchema.IString.contentMediaType} - * - {@link OpenApi.IJsonSchema.IString.default} - * - {@link OpenApi.IJsonSchema.IArray.minItems} - * - {@link OpenApi.IJsonSchema.IArray.maxItems} - * - {@link OpenApi.IJsonSchema.IArray.unique} - * - * Additionally, OpenAI cannot define the {@link IChatGptSchema.description} - * property for the {@link IChatGptSchema.IReference} type, and does not - * understand encapsulation of the {@link IChatGptSchema.IAnyOf} type. Therefore, - * the {@link IChatGptSchema.description} is written to the parent object type, - * not the reference type. - * - * ```json - * { - * "type": "object", - * "description": "### Description of {@link something} property.\n\n> Hello?", - * "properties": { - * "something": { - * "$ref": "#/$defs/SomeObject" - * } - * } - * } - * ``` - * - * @author Jeongho Nam - https://github.com/samchon - * @reference https://platform.openai.com/docs/guides/function-calling - * @reference https://platform.openai.com/docs/guides/structured-outputs - * @warning Specified not only by official documentation, but also by - * experimental validation. Therefore, definitions may be inaccurate or - * change in the future. If you find wrong or outdated definitions, - * please report via issue. 
- * @issue https://github.com/samchon/openapi/issues - */ -export type IChatGptSchema = - | IChatGptSchema.IBoolean - | IChatGptSchema.IInteger - | IChatGptSchema.INumber - | IChatGptSchema.IString - | IChatGptSchema.IArray - | IChatGptSchema.IObject - | IChatGptSchema.IReference - | IChatGptSchema.IAnyOf - | IChatGptSchema.INull - | IChatGptSchema.IUnknown; -export namespace IChatGptSchema { - /** Configuration for ChatGPT schema composition. */ - export interface IConfig { - /** - * Whether to allow reference type in everywhere. - * - * If you configure this property to `false`, most of reference types - * represented by {@link IChatGptSchema.IReference} would be escaped to a - * plain type unless recursive type case. - * - * This is because the lower version of ChatGPT does not understand the - * reference type well, and even the modern version of ChatGPT sometimes - * occur the hallucination. - * - * However, the reference type makes the schema size smaller, so that - * reduces the LLM token cost. Therefore, if you're using the modern version - * of ChatGPT, and want to reduce the LLM token cost, you can configure this - * property to `true`. - * - * @default true - */ - reference: boolean; - - /** - * Whether to apply the strict mode. - * - * If you configure this property to `true`, the ChatGPT function calling - * does not allow optional properties and dynamic key typed properties in - * the {@link IChatGptSchema.IObject} type. Instead, it increases the success - * rate of the function calling. - * - * By the way, if you utilize the {@link typia.validate} function and give - * its validation feedback to the ChatGPT, its performance is much better - * than the strict mode. Therefore, I recommend you to just turn off the - * strict mode and utilize the {@link typia.validate} function instead. - * - * @default false - */ - strict?: boolean; - } - - /** - * Type for function parameters. 
- * - * `IChatGptSchema.IParameters` defines a function's parameters as a keyword - * object type, where each property represents a named parameter. - * - * It can also be used for structured output metadata to define the expected - * format of ChatGPT responses. - * - * @reference https://platform.openai.com/docs/guides/structured-outputs - */ - export interface IParameters extends Omit { - /** Collection of the named types. */ - $defs: Record; - - /** - * Additional properties information. - * - * The `additionalProperties` defines the type schema for additional - * properties that are not listed in the {@link properties}. - * - * By the way, it is not allowed at the parameters level. - */ - additionalProperties: false; - } - - /** Boolean type info. */ - export interface IBoolean extends IJsonSchemaAttribute.IBoolean { - /** Enumeration values. */ - enum?: Array; - } - - /** Integer type info. */ - export interface IInteger extends IJsonSchemaAttribute.IInteger { - /** Enumeration values. */ - enum?: Array; - } - - /** Number (double) type info. */ - export interface INumber extends IJsonSchemaAttribute.INumber { - /** Enumeration values. */ - enum?: Array; - } - - /** String type info. */ - export interface IString extends IJsonSchemaAttribute.IString { - /** Enumeration values. */ - enum?: Array; - - /** Default value. */ - default?: string; - } - - /** Array type info. */ - export interface IArray extends IJsonSchemaAttribute.IArray { - /** - * Items type info. - * - * The `items` means the type of the array elements. In other words, it is - * the type schema info of the `T` in the TypeScript array type `Array`. - */ - items: IChatGptSchema; - } - - /** Object type info. */ - export interface IObject extends IJsonSchemaAttribute.IObject { - /** - * Properties of the object. - * - * The `properties` means a list of key-value pairs of the object's regular - * properties. The key is the name of the regular property, and the value is - * the type schema info. 
- */ - properties: Record; - - /** - * Additional properties information. - * - * The `additionalProperties` defines the type schema for additional - * properties that are not listed in the {@link properties}. - * - * If the value is `true`, it means that the additional properties are not - * restricted. They can be any type. Otherwise, if the value is - * {@link IChatGptSchema} type, it means that the additional properties must - * follow the type schema info. - * - * - `true`: `Record` - * - `IChatGptSchema`: `Record` - * - * Note: If you've configured {@link IChatGptSchema.IConfig.strict} as - * `true`, ChatGPT function calling does not support dynamic key typed - * properties, so `additionalProperties` is always `false`. - */ - additionalProperties?: boolean | IChatGptSchema; - - /** - * List of required property keys. - * - * The `required` contains a list of property keys from {@link properties} - * that must be provided. Properties not listed in `required` are optional, - * while those listed must be filled. - * - * Below is an example of {@link properties} and `required`: - * - * ```typescript - * interface SomeObject { - * id: string; - * email: string; - * name?: string; - * } - * ``` - * - * As you can see, `id` and `email` {@link properties} are {@link required}, - * so they are listed in the `required` array. - * - * ```json - * { - * "type": "object", - * "properties": { - * "id": { "type": "string" }, - * "email": { "type": "string" }, - * "name": { "type": "string" } - * }, - * "required": ["id", "email"] - * } - * ``` - */ - required: string[]; - } - - /** Reference type directing to named schema. */ - export interface IReference extends IJsonSchemaAttribute { - /** - * Reference to the named schema. - * - * The `$ref` is a reference to a named schema. The format follows the JSON - * Pointer specification. In OpenAPI, the `$ref` starts with `#/$defs/` - * which indicates the type is stored in the - * {@link IChatGptSchema.IParameters.$defs} object. 
- * - * - `#/$defs/SomeObject` - * - `#/$defs/AnotherObject` - */ - $ref: string; - } - - /** - * Union type. - * - * `IAnyOf` represents a union type in TypeScript (`A | B | C`). - * - * For reference, even if your Swagger (or OpenAPI) document defines `anyOf` - * instead of `oneOf`, {@link IChatGptSchema} forcibly converts it to `anyOf` - * type. - */ - export interface IAnyOf extends IJsonSchemaAttribute { - /** List of the union types. */ - anyOf: Exclude[]; - - /** Discriminator info of the union type. */ - "x-discriminator"?: IAnyOf.IDiscriminator; - } - export namespace IAnyOf { - /** Discriminator info of the union type. */ - export interface IDiscriminator { - /** Property name for the discriminator. */ - propertyName: string; - - /** - * Mapping of discriminator values to schema names. - * - * This property is valid only for {@link IReference} typed - * {@link IAnyOf.anyOf} elements. Therefore, the `key` of `mapping` is the - * discriminator value, and the `value` of `mapping` is the schema name - * like `#/components/schemas/SomeObject`. - */ - mapping?: Record; - } - } - - /** Null type. */ - export interface INull extends IJsonSchemaAttribute.INull {} - - /** Unknown, the `any` type. */ - export interface IUnknown extends IJsonSchemaAttribute.IUnknown {} -} diff --git a/src/structures/IClaudeSchema.ts b/src/structures/IClaudeSchema.ts deleted file mode 100644 index 9294e22b..00000000 --- a/src/structures/IClaudeSchema.ts +++ /dev/null @@ -1,138 +0,0 @@ -import { ILlmSchemaV3_1 } from "./ILlmSchemaV3_1"; - -/** - * Type schema for Claude function calling. - * - * `IClaudeSchema` defines the type schema format for Claude function calling. - * - * `IClaudeSchema` appears to fully support the JSON schema definition of the - * OpenAPI v3.1 specification; {@link OpenApiV3_1.IJsonSchema}. 
However, since - * {@link OpenApiV3_1.IJsonSchema} has many ambiguous and duplicated expressions, - * `IClaudeSchema` is designed to be clear and simple for Claude function - * calling by utilizing {@link ILlmSchemaV3_1}, which has been transformed from - * {@link OpenApi.IJsonSchema} for convenience and clarity. - * - * Therefore, `IClaudeSchema` does not follow the entire OpenAPI v3.1 - * specification. It has specific restrictions and definitions. Here are the - * differences between `ILlmSchemaV3_1` and the OpenAPI v3.1 JSON schema: - * - * - Decompose mixed type: {@link OpenApiV3_1.IJsonSchema.IMixed} - * - Resolve nullable property: - * {@link OpenApiV3_1.IJsonSchema.__ISignificant.nullable} - * - Tuple type is banned: {@link OpenApiV3_1.IJsonSchema.ITuple.prefixItems} - * - Constant type is banned: {@link OpenApiV3_1.IJsonSchema.IConstant} - * - Merge {@link OpenApiV3_1.IJsonSchema.IAnyOf} to {@link IClaudeSchema.IOneOf} - * - Merge {@link OpenApiV3_1.IJsonSchema.IAllOf} to {@link IClaudeSchema.IObject} - * - Merge {@link OpenApiV3_1.IJsonSchema.IRecursiveReference} to - * {@link IClaudeSchema.IReference} - * - Do not support {@link OpenApiV3_1.IJsonSchema.ITuple} type - * - * Compared to {@link OpenApi.IJsonSchema}, the emended JSON schema - * specification: - * - * - {@link IClaudeSchema.IParameters.$defs} instead of - * {@link OpenApi.IComponents.schemas} - * - Do not support {@link OpenApi.IJsonSchema.ITuple} type - * - {@link IClaudeSchema.properties} and {@link IClaudeSchema.required} are always - * defined - * - * For reference, if you compose the `IClaudeSchema` type with the - * {@link IClaudeSchema.IConfig.reference} `false` option (default is `false`), - * only recursively named types will be archived into the - * {@link IClaudeSchema.IParameters.$defs}, and others will be escaped from the - * {@link IClaudeSchema.IReference} type. 
- * - * @author Jeongho Nam - https://github.com/samchon - * @reference https://docs.anthropic.com/en/docs/build-with-claude/tool-use - * @reference https://docs.anthropic.com/en/docs/test-and-evaluate/strengthen-guardrails/increase-consistency - */ -export type IClaudeSchema = - | IClaudeSchema.IConstant - | IClaudeSchema.IBoolean - | IClaudeSchema.IInteger - | IClaudeSchema.INumber - | IClaudeSchema.IString - | IClaudeSchema.IArray - | IClaudeSchema.IObject - | IClaudeSchema.IReference - | IClaudeSchema.IOneOf - | IClaudeSchema.INull - | IClaudeSchema.IUnknown; -export namespace IClaudeSchema { - /** Configuration for Claude schema composition. */ - export interface IConfig { - /** - * Whether to allow reference types everywhere. - * - * If you configure this property to `false`, most reference types - * represented by {@link IClaudeSchema.IReference} will be escaped to plain - * types unless in recursive type cases. - * - * This is because some smaller LLM models do not understand reference types - * well, and even large LLM models sometimes experience hallucinations. - * - * However, reference types make the schema size smaller, reducing LLM token - * costs. Therefore, if you're using a large LLM model and want to reduce - * token costs, you can configure this property to `true`. - * - * @default true - */ - reference: boolean; - } - - /** - * Type for function parameters. - * - * `IClaudeSchema.IParameters` defines a function's parameters as a keyword - * object type. - * - * It can also be used for structured output metadata. - * - * @reference https://platform.openai.com/docs/guides/structured-outputs - */ - export type IParameters = ILlmSchemaV3_1.IParameters; - - /** Constant value type. */ - export type IConstant = ILlmSchemaV3_1.IConstant; - - /** Boolean type info. */ - export type IBoolean = ILlmSchemaV3_1.IBoolean; - - /** Integer type info. */ - export type IInteger = ILlmSchemaV3_1.IInteger; - - /** Number (double) type info. 
*/ - export type INumber = ILlmSchemaV3_1.INumber; - - /** String type info. */ - export type IString = ILlmSchemaV3_1.IString; - - /** Array type info. */ - export type IArray = ILlmSchemaV3_1.IArray; - - /** Object type info. */ - export type IObject = ILlmSchemaV3_1.IObject; - - /** Reference type directing to named schema. */ - export type IReference = ILlmSchemaV3_1.IReference; - - /** - * Union type. - * - * `IOneOf` represents a union type in TypeScript (`A | B | C`). - * - * For reference, even if your Swagger (or OpenAPI) document defines `anyOf` - * instead of `oneOf`, {@link OpenApi} forcibly converts it to `oneOf` type. - */ - export type IOneOf = ILlmSchemaV3_1.IOneOf; - export namespace IOneOf { - /** Discriminator information of the union type. */ - export type IDiscriminator = ILlmSchemaV3_1.IOneOf.IDiscriminator; - } - - /** Null type. */ - export type INull = ILlmSchemaV3_1.INull; - - /** Unknown, the `any` type. */ - export type IUnknown = ILlmSchemaV3_1.IUnknown; -} diff --git a/src/structures/IGeminiSchema.ts b/src/structures/IGeminiSchema.ts deleted file mode 100644 index cdc7d122..00000000 --- a/src/structures/IGeminiSchema.ts +++ /dev/null @@ -1,411 +0,0 @@ -import { IJsonSchemaAttribute } from "./IJsonSchemaAttribute"; - -/** - * Type schema info for Gemini function calling. - * - * `IGeminiSchema` is a type schema info for Gemini function calling, - * implemented according to the official Gemini guide documentation - * specification. - * - * `IGeminiSchema` basically follows the JSON schema definition of the OpenAPI - * v3.1 specification; {@link OpenApiV3_1.IJsonSchema}. Although Gemini had - * significant limitations in earlier versions (prior to 2025-11-05), it now - * supports nearly all JSON schema features including union types, reference - * types, and various constraint properties. 
- * - * In earlier versions, Gemini blocked virtually all JSON schema specifications - * such as `anyOf`, `$ref`, `format`, `maxItems`, making function calling - * practically impossible. However, these limitations have been removed in recent - * updates. - * - * `IGeminiSchema` provides a type definition that strictly follows the Gemini - * official specification. - * - * Here is the list of how `IGeminiSchema` is different with the OpenAPI v3.1 - * JSON schema: - * - * - Decompose mixed type: {@link OpenApiV3_1.IJsonSchema.IMixed} - * - Resolve nullable property: - * {@link OpenApiV3_1.IJsonSchema.__ISignificant.nullable} - * - Tuple type is banned: {@link OpenApiV3_1.IJsonSchema.ITuple.prefixItems} - * - Constant type is banned: {@link OpenApiV3_1.IJsonSchema.IConstant} - * - Merge {@link OpenApiV3_1.IJsonSchema.IOneOf} to {@link IGeminiSchema.IAnyOf} - * - Merge {@link OpenApiV3_1.IJsonSchema.IAllOf} to {@link IGeminiSchema.IObject} - * - Merge {@link OpenApiV3_1.IJsonSchema.IRecursiveReference} to - * {@link IGeminiSchema.IReference} - * - * Compared to {@link OpenApi.IJsonSchema}, the emended JSON schema - * specification: - * - * - {@link IGeminiSchema.IAnyOf} instead of {@link OpenApi.IJsonSchema.IOneOf} - * - {@link IGeminiSchema.IParameters.$defs} instead of - * {@link OpenApi.IJsonSchema.IComponents.schemas} - * - Do not support {@link OpenApi.IJsonSchema.ITuple} type - * - {@link IGeminiSchema.properties} and {@link IGeminiSchema.required} are always - * defined - * - * For reference, if you compose the `IGeminiSchema` type with the - * {@link IGeminiSchema.IConfig.reference} `false` option (default is `false`), - * only recursively named types are archived into the - * {@link IGeminiSchema.IParameters.$defs}, and others are escaped from the - * {@link IGeminiSchema.IReference} type. 
- * - * @author Jeongho Nam - https://github.com/samchon - * @reference https://cloud.google.com/vertex-ai/generative-ai/docs/model-reference/function-calling - * @reference https://cloud.google.com/vertex-ai/generative-ai/docs/multimodal/function-calling - * @reference https://ai.google.dev/gemini-api/docs/structured-output - * @warning Specified not only by the official documentation, but also by - * experimental validation. Therefore, definitions may be inaccurate or - * change in the future. If you find wrong or outdated definitions, - * please report via issue. - * @issue https://github.com/samchon/openapi/issues - */ -export type IGeminiSchema = - | IGeminiSchema.IBoolean - | IGeminiSchema.IInteger - | IGeminiSchema.INumber - | IGeminiSchema.IString - | IGeminiSchema.IArray - | IGeminiSchema.IObject - | IGeminiSchema.IReference - | IGeminiSchema.IAnyOf - | IGeminiSchema.INull - | IGeminiSchema.IUnknown; -export namespace IGeminiSchema { - /** Configuration for the Gemini schema composition. */ - export interface IConfig { - /** - * Whether to allow reference type in everywhere. - * - * If you configure this property to `false`, most of reference types - * represented by {@link IGeminiSchema.IReference} would be escaped to a - * plain type unless recursive type case. - * - * This is because the lower version of ChatGPT does not understand the - * reference type well, and even the modern version of ChatGPT sometimes - * occur the hallucination. - * - * However, the reference type makes the schema size smaller, so that - * reduces the LLM token cost. Therefore, if you're using the modern version - * of ChatGPT, and want to reduce the LLM token cost, you can configure this - * property to `true`. - * - * @default true - */ - reference: boolean; - } - - /** - * Type for function parameters. - * - * `IGeminiSchema.IParameters` defines a function's parameters as a keyword - * object type, where each property represents a named parameter. 
- * - * It can also be used for structured output metadata to define the expected - * format of ChatGPT responses. - * - * @reference https://platform.openai.com/docs/guides/structured-outputs - */ - export interface IParameters extends Omit { - /** Collection of the named types. */ - $defs: Record; - - /** - * Additional properties information. - * - * The `additionalProperties` defines the type schema for additional - * properties that are not listed in the {@link properties}. - * - * By the way, it is not allowed at the parameters level. - */ - additionalProperties: false; - } - - /** Boolean type info. */ - export interface IBoolean extends IJsonSchemaAttribute.IBoolean { - /** Enumeration values. */ - enum?: Array; - - /** Default value. */ - default?: boolean; - } - - /** Integer type info. */ - export interface IInteger extends IJsonSchemaAttribute.IInteger { - /** Enumeration values. */ - enum?: Array; - - /** - * Default value. - * - * @type int64 - */ - default?: number; - - /** - * Minimum value restriction. - * - * @type int64 - */ - minimum?: number; - - /** - * Maximum value restriction. - * - * @type int64 - */ - maximum?: number; - - /** Exclusive minimum value restriction. */ - exclusiveMinimum?: number; - - /** Exclusive maximum value restriction. */ - exclusiveMaximum?: number; - - /** - * Multiple of value restriction. - * - * @type uint64 - * @exclusiveMinimum 0 - */ - multipleOf?: number; - } - - /** Number (double) type info. */ - export interface INumber extends IJsonSchemaAttribute.INumber { - /** Enumeration values. */ - enum?: Array; - - /** Default value. */ - default?: number; - - /** Minimum value restriction. */ - minimum?: number; - - /** Maximum value restriction. */ - maximum?: number; - - /** Exclusive minimum value restriction. */ - exclusiveMinimum?: number; - - /** Exclusive maximum value restriction. */ - exclusiveMaximum?: number; - - /** - * Multiple of value restriction. 
- * - * @exclusiveMinimum 0 - */ - multipleOf?: number; - } - - /** String type info. */ - export interface IString extends IJsonSchemaAttribute.IString { - /** Enumeration values. */ - enum?: Array; - - /** Default value. */ - default?: string; - - /** Format restriction. */ - format?: - | "binary" - | "byte" - | "password" - | "regex" - | "uuid" - | "email" - | "hostname" - | "idn-email" - | "idn-hostname" - | "iri" - | "iri-reference" - | "ipv4" - | "ipv6" - | "uri" - | "uri-reference" - | "uri-template" - | "url" - | "date-time" - | "date" - | "time" - | "duration" - | "json-pointer" - | "relative-json-pointer" - | (string & {}); - - /** Pattern restriction. */ - pattern?: string; - - /** Content media type restriction. */ - contentMediaType?: string; - - /** - * Minimum length restriction. - * - * @type uint64 - */ - minLength?: number; - - /** - * Maximum length restriction. - * - * @type uint64 - */ - maxLength?: number; - } - - /** Array type info. */ - export interface IArray extends IJsonSchemaAttribute.IArray { - /** - * Items type info. - * - * The `items` means the type of the array elements. In other words, it is - * the type schema info of the `T` in the TypeScript array type `Array`. - */ - items: IGeminiSchema; - - /** - * Unique items restriction. - * - * If this property value is `true`, target array must have unique items. - */ - uniqueItems?: boolean; - - /** - * Minimum items restriction. - * - * Restriction of minimum number of items in the array. - * - * @type uint64 - */ - minItems?: number; - - /** - * Maximum items restriction. - * - * Restriction of maximum number of items in the array. - * - * @type uint64 - */ - maxItems?: number; - } - - /** Object type info. */ - export interface IObject extends IJsonSchemaAttribute.IObject { - /** - * Properties of the object. - * - * The `properties` means a list of key-value pairs of the object's regular - * properties. 
The key is the name of the regular property, and the value is - * the type schema info. - */ - properties: Record; - - /** - * Additional properties' info. - * - * The `additionalProperties` means the type schema info of the additional - * properties that are not listed in the {@link properties}. - * - * If the value is `true`, it means that the additional properties are not - * restricted. They can be any type. Otherwise, if the value is - * {@link IGeminiSchema} type, it means that the additional properties must - * follow the type schema info. - * - * - `true`: `Record` - * - `IGeminiSchema`: `Record` - */ - additionalProperties?: boolean | IGeminiSchema; - - /** - * List of required property keys. - * - * The `required` contains a list of property keys from {@link properties} - * that must be provided. Properties not listed in `required` are optional, - * while those listed must be filled. - * - * Below is an example of {@link properties} and `required`: - * - * ```typescript - * interface SomeObject { - * id: string; - * email: string; - * name?: string; - * } - * ``` - * - * As you can see, `id` and `email` {@link properties} are {@link required}, - * so they are listed in the `required` array. - * - * ```json - * { - * "type": "object", - * "properties": { - * "id": { "type": "string" }, - * "email": { "type": "string" }, - * "name": { "type": "string" } - * }, - * "required": ["id", "email"] - * } - * ``` - */ - required: string[]; - } - - /** Reference type directing to named schema. */ - export interface IReference extends IJsonSchemaAttribute { - /** - * Reference to the named schema. - * - * The `$ref` is a reference to a named schema. The format follows the JSON - * Pointer specification. In OpenAPI, the `$ref` starts with `#/$defs/` - * which indicates the type is stored in the - * {@link IGeminiSchema.IParameters.$defs} object. - * - * - `#/$defs/SomeObject` - * - `#/$defs/AnotherObject` - */ - $ref: string; - } - - /** - * Union type. 
- * - * `IAnyOf` represents a union type in TypeScript (`A | B | C`). - * - * For reference, even if your Swagger (or OpenAPI) document defines `anyOf` - * instead of `oneOf`, {@link IGeminiSchema} forcibly converts it to `anyOf` - * type. - */ - export interface IAnyOf extends IJsonSchemaAttribute { - /** List of the union types. */ - anyOf: Exclude[]; - - /** Discriminator info of the union type. */ - "x-discriminator"?: IAnyOf.IDiscriminator; - } - export namespace IAnyOf { - /** Discriminator info of the union type. */ - export interface IDiscriminator { - /** Property name for the discriminator. */ - propertyName: string; - - /** - * Mapping of discriminator values to schema names. - * - * This property is valid only for {@link IReference} typed - * {@link IAnyOf.anyOf} elements. Therefore, the `key` of `mapping` is the - * discriminator value, and the `value` of `mapping` is the schema name - * like `#/components/schemas/SomeObject`. - */ - mapping?: Record; - } - } - - /** Null type. */ - export interface INull extends IJsonSchemaAttribute.INull {} - - /** Unknown, the `any` type. */ - export interface IUnknown extends IJsonSchemaAttribute.IUnknown {} -} diff --git a/src/structures/IHttpLlmApplication.ts b/src/structures/IHttpLlmApplication.ts index 62055e3a..c1431ec6 100644 --- a/src/structures/IHttpLlmApplication.ts +++ b/src/structures/IHttpLlmApplication.ts @@ -2,7 +2,6 @@ import { OpenApi } from "../OpenApi"; import { IHttpLlmFunction } from "./IHttpLlmFunction"; import { IHttpMigrateRoute } from "./IHttpMigrateRoute"; import { ILlmSchema } from "./ILlmSchema"; -import { ILlmSchemaV3 } from "./ILlmSchemaV3"; /** * Application of LLM function call from OpenAPI document. @@ -12,17 +11,16 @@ import { ILlmSchemaV3 } from "./ILlmSchemaV3"; * {@link OpenApi.IDocument OpenAPI document} and its * {@link OpenApi.IOperation operation} metadata. 
It also contains * {@link IHttpLlmApplication.errors failed operations}, and adjusted - * {@link IHttpLlmApplication.options options} during the `IHttpLlmApplication` - * construction. + * {@link IHttpLlmApplication.config configuration} during the + * `IHttpLlmApplication` construction. * * About the {@link OpenApi.IOperation API operations}, they are converted to * {@link IHttpLlmFunction} type which represents LLM function calling schema. By * the way, if there're some types which does not supported by LLM, the * operation would be failed and pushed into the - * {@link IHttpLlmApplication.errors}. Otherwise not, the operation would be + * {@link IHttpLlmApplication.errors}. Otherwise, the operation would be * successfully converted to {@link IHttpLlmFunction} and its type schemas are - * downgraded to {@link OpenApiV3.IJsonSchema} and converted to - * {@link ILlmSchemaV3}. + * converted to {@link ILlmSchema}. * * For reference, the arguments type is composed by below rule. * @@ -42,7 +40,7 @@ import { ILlmSchemaV3 } from "./ILlmSchemaV3"; * must be composed by Human, not by LLM. File uploading feature or some * sensitive information like secret key (password) are the examples. In that * case, you can separate the function parameters to both LLM and Human sides by - * configuring the {@link IHttpLlmApplication.IOptions.separate} property. The + * configuring the {@link IHttpLlmApplication.IConfig.separate} property. The * separated parameters are assigned to the {@link IHttpLlmFunction.separated} * property. * @@ -53,17 +51,14 @@ import { ILlmSchemaV3 } from "./ILlmSchemaV3"; * continue the next conversation based on the return value. 
* * Additionally, if you've configured - * {@link IHttpLlmApplication.IOptions.separate}, so that the parameters are + * {@link IHttpLlmApplication.IConfig.separate}, so that the parameters are * separated to Human and LLM sides, you can merge these human and LLM sides' * parameters into one through {@link HttpLlm.mergeParameters} before the actual * LLM function call execution. * * @author Jeongho Nam - https://github.com/samchon */ -export interface IHttpLlmApplication { - /** Model of the target LLM. */ - model: Model; - +export interface IHttpLlmApplication { /** * List of function metadata. * @@ -72,63 +67,66 @@ export interface IHttpLlmApplication { * When you want to execute the function with LLM constructed arguments, you * can do it through {@link LlmFetcher.execute} function. */ - functions: IHttpLlmFunction[]; + functions: IHttpLlmFunction[]; /** List of errors occurred during the composition. */ errors: IHttpLlmApplication.IError[]; /** Configuration for the application. */ - options: IHttpLlmApplication.IOptions; + config: IHttpLlmApplication.IConfig; } export namespace IHttpLlmApplication { - /** Options for the HTTP LLM application schema composition. */ - export type IOptions = - ILlmSchema.ModelConfig[Model] & { - /** - * Separator function for the parameters. - * - * When composing parameter arguments through LLM function call, there can - * be a case that some parameters must be composed by human, or LLM cannot - * understand the parameter. - * - * For example, if the parameter type has configured - * {@link IGeminiSchema.IString.contentMediaType} which indicates file - * uploading, it must be composed by human, not by LLM (Large Language - * Model). - * - * In that case, if you configure this property with a function that - * predicating whether the schema value must be composed by human or not, - * the parameters would be separated into two parts. 
-     *
-     * - {@link ILlmFunction.separated.llm}
-     * - {@link ILlmFunction.separated.human}
-     *
-     * When writing the function, note that returning value `true` means to be
-     * a human composing the value, and `false` means to LLM composing the
-     * value. Also, when predicating the schema, it would better to utilize
-     * the {@link GeminiTypeChecker} like features.
-     *
-     * @default null
-     * @param schema Schema to be separated.
-     * @returns Whether the schema value must be composed by human or not.
-     */
-    separate?: null | ((schema: ILlmSchema.ModelSchema[Model]) => boolean);
+  /** Configuration for the HTTP LLM application schema composition. */
+  export interface IConfig extends ILlmSchema.IConfig {
+    /**
+     * Separator function for the parameters.
+     *
+     * When composing parameter arguments through LLM function call, there can
+     * be a case that some parameters must be composed by human, or LLM cannot
+     * understand the parameter.
+     *
+     * For example, if the parameter type has configured
+     * {@link ILlmSchema.IString.contentMediaType} which indicates file
+     * uploading, it must be composed by human, not by LLM (Large Language
+     * Model).
+     *
+     * In that case, if you configure this property with a function that
+     * predicates whether the schema value must be composed by human or not,
+     * the parameters would be separated into two parts.
+     *
+     * - {@link ILlmFunction.separated.llm}
+     * - {@link ILlmFunction.separated.human}
+     *
+     * When writing the function, note that returning value `true` means to be a
+     * human composing the value, and `false` means to LLM composing the value.
+     * Also, when predicating the schema, it would be better to utilize the
+     * {@link LlmTypeChecker} like features.
+     *
+     * @default null
+     * @param schema Schema to be separated.
+     * @returns Whether the schema value must be composed by human or not.
+     */
+    separate: null | ((schema: ILlmSchema) => boolean);

-    /**
-     * Maximum length of function name.
- * - * When a function name is longer than this value, it will be truncated. - * - * If not possible to truncate due to the duplication, the function name - * would be modified to randomly generated (UUID v4). - * - * @default 64 - */ - maxLength?: number; + /** + * Maximum length of function name. + * + * When a function name is longer than this value, it will be truncated. + * + * If not possible to truncate due to the duplication, the function name + * would be modified to randomly generated (UUID v4). + * + * @default 64 + */ + maxLength: number; - /** Whether to disallow superfluous properties or not. */ - equals?: boolean; - }; + /** + * Whether to disallow superfluous properties or not. + * + * @default false + */ + equals: boolean; + } /** Error occurred in the composition. */ export interface IError { diff --git a/src/structures/IHttpLlmController.ts b/src/structures/IHttpLlmController.ts index 593caeb3..6e1bde63 100644 --- a/src/structures/IHttpLlmController.ts +++ b/src/structures/IHttpLlmController.ts @@ -2,7 +2,6 @@ import { IHttpConnection } from "./IHttpConnection"; import { IHttpLlmApplication } from "./IHttpLlmApplication"; import { IHttpLlmFunction } from "./IHttpLlmFunction"; import { IHttpResponse } from "./IHttpResponse"; -import { ILlmSchema } from "./ILlmSchema"; /** * Controller of HTTP LLM function calling. 
@@ -31,12 +30,11 @@ import { ILlmSchema } from "./ILlmSchema"; * { * protocol: "http", * name: "shopping", - * application: HttpLlm.application( - * model: "chatgpt", + * application: HttpLlm.application({ * document: await fetch( * "https://shopping-be.wrtn.io/editor/swagger.json", * ).then((r) => r.json()), - * ), + * }), * connection: { * host: "https://shopping-be.wrtn.io", * headers: { @@ -57,10 +55,9 @@ import { ILlmSchema } from "./ILlmSchema"; * - {@link ILlmController} for TypeScript * * @author Jeongho Nam - https://github.com/samchon - * @template Model Type of the LLM model * @reference https://wrtnlabs.io/agentica/docs/core/controller/swagger/ */ -export interface IHttpLlmController { +export interface IHttpLlmController { /** Protocol discriminator. */ protocol: "http"; @@ -68,7 +65,7 @@ export interface IHttpLlmController { name: string; /** Application schema of function calling. */ - application: IHttpLlmApplication; + application: IHttpLlmApplication; /** * Connection to the server. @@ -93,10 +90,10 @@ export interface IHttpLlmController { connection: IHttpConnection; /** Application schema. */ - application: IHttpLlmApplication; + application: IHttpLlmApplication; /** Function schema. */ - function: IHttpLlmFunction; + function: IHttpLlmFunction; /** * Arguments of the function calling. diff --git a/src/structures/IHttpLlmFunction.ts b/src/structures/IHttpLlmFunction.ts index b8c23f47..d789eb68 100644 --- a/src/structures/IHttpLlmFunction.ts +++ b/src/structures/IHttpLlmFunction.ts @@ -19,11 +19,7 @@ import { IValidation } from "./IValidation"; * * For reference, different between `IHttpLlmFunction` and its origin source * {@link OpenApi.IOperation} is, `IHttpLlmFunction` has converted every type - * schema information from {@link OpenApi.IJsonSchema} to {@link ILlmSchemaV3} to - * escape {@link OpenApi.IJsonSchema.IReference reference types}, and downgrade - * the version of the JSON schema to OpenAPI 3.0. 
It's because LLM function call - * feature cannot understand both reference types and OpenAPI 3.1 - * specification. + * schema information from {@link OpenApi.IJsonSchema} to {@link ILlmSchema}. * * Additionally, the properties' rule is: * @@ -42,7 +38,7 @@ import { IValidation } from "./IValidation"; * @author Jeongho Nam - https://github.com/samchon * @reference https://platform.openai.com/docs/guides/function-calling */ -export interface IHttpLlmFunction { +export interface IHttpLlmFunction { /** HTTP method of the endpoint. */ method: "get" | "post" | "patch" | "put" | "delete"; @@ -75,14 +71,14 @@ export interface IHttpLlmFunction { * * > - Example 1 * - * > - Path: `POST /shopping/sellers/sales` - * > - Accessor: `shopping.sellers.sales.post` + * > - Path: `POST /shopping/sellers/sales` + * > - Accessor: `shopping.sellers.sales.post` * > - Example 2 * - * > - Endpoint: `GET - * > /shoppings/sellers/sales/:saleId/reviews/:reviewId/comments/:id` - * > - Accessor: - * > `shoppings.sellers.sales.reviews.getBySaleIdAndReviewIdAndCommentId` + * > - Endpoint: `GET + * > /shoppings/sellers/sales/:saleId/reviews/:reviewId/comments/:id` + * > - Accessor: + * > `shoppings.sellers.sales.reviews.getBySaleIdAndReviewIdAndCommentId` * * @maxLength 64 */ @@ -91,9 +87,9 @@ export interface IHttpLlmFunction { /** * List of parameter types. * - * If you've configured {@link IHttpLlmApplication.IOptions.keyword} as `true`, + * If you've configured {@link IHttpLlmApplication.IConfig.keyword} as `true`, * number of {@link IHttpLlmFunction.parameters} are always 1 and the first - * parameter's type is always {@link ILlmSchemaV3.IObject}. The properties' + * parameter's type is always {@link ILlmSchema.IObject}. 
The properties' * rule is: * * - `pathParameters`: Path parameters of {@link IHttpMigrateRoute.parameters} @@ -119,15 +115,14 @@ export interface IHttpLlmFunction { * ]; * ``` */ - parameters: ILlmSchema.ModelParameters[Model]; + parameters: ILlmSchema.IParameters; /** * Collection of separated parameters. * - * Filled only when {@link IHttpLlmApplication.IOptions.separate} is - * configured. + * Filled only when {@link IHttpLlmApplication.IConfig.separate} is configured. */ - separated?: IHttpLlmFunction.ISeparated; + separated?: IHttpLlmFunction.ISeparated; /** * Expected return type. @@ -135,7 +130,7 @@ export interface IHttpLlmFunction { * If the target operation returns nothing (`void`), the `output` would be * `undefined`. */ - output?: ILlmSchema.ModelSchema[Model] | undefined; + output?: ILlmSchema | undefined; /** * Description of the function. @@ -144,8 +139,8 @@ export interface IHttpLlmFunction { * * 1. Starts with the {@link OpenApi.IOperation.summary} paragraph * 2. The next paragraphs are filled with the - * {@link OpenApi.IOperation.description}. If the first - * paragraph of {@link OpenApi.IOperation.description} matches the + * {@link OpenApi.IOperation.description}. If the first paragraph of + * {@link OpenApi.IOperation.description} matches the * {@link OpenApi.IOperation.summary}, it is not duplicated * 3. Parameter descriptions are added with `@param` tags * 4. {@link OpenApi.IOperation.security Security requirements} are added with @@ -158,8 +153,8 @@ export interface IHttpLlmFunction { * description to determine which function to call. * * Also, when the LLM converses with users, the `description` explains the - * function to the user. Therefore, the `description` property has the - * highest priority and should be carefully considered. + * function to the user. Therefore, the `description` property has the highest + * priority and should be carefully considered. 
*/ description?: string | undefined; @@ -188,15 +183,15 @@ export interface IHttpLlmFunction { * types like `number` defined in the {@link parameters} schema, LLMs often * provide a `string` typed value instead. * - * In such cases, you should provide validation feedback to the LLM using - * this `validate` function. The `validate` function returns detailed - * information about type errors in the arguments. + * In such cases, you should provide validation feedback to the LLM using this + * `validate` function. The `validate` function returns detailed information + * about type errors in the arguments. * * Based on my experience, OpenAI's `gpt-4o-mini` model tends to construct - * invalid function calling arguments about 50% of the time on the first attempt. - * However, when corrected through this `validate` function, the success - * rate jumps to 99% on the second attempt, and I've never seen a failure - * on the third attempt. + * invalid function calling arguments about 50% of the time on the first + * attempt. However, when corrected through this `validate` function, the + * success rate jumps to 99% on the second attempt, and I've never seen a + * failure on the third attempt. * * > If you have {@link separated} parameters, use the * > {@link IHttpLlmFunction.ISeparated.validate} function instead when @@ -231,17 +226,17 @@ export interface IHttpLlmFunction { } export namespace IHttpLlmFunction { /** Collection of separated parameters. */ - export interface ISeparated { + export interface ISeparated { /** * Parameters that would be composed by the LLM. * * Even though no property exists in the LLM side, the `llm` property would * have at least empty object type. */ - llm: ILlmSchema.ModelParameters[Model]; + llm: ILlmSchema.IParameters; /** Parameters that would be composed by the human. */ - human: ILlmSchema.ModelParameters[Model] | null; + human: ILlmSchema.IParameters | null; /** * Validate function for separated arguments. 
@@ -259,10 +254,10 @@ export namespace IHttpLlmFunction { * > information about type errors in the arguments. * * > Based on my experience, OpenAI's `gpt-4o-mini` model tends to construct - * > invalid function calling arguments about 50% of the time on the first attempt. - * > However, when corrected through this `validate` function, the success - * > rate jumps to 99% on the second attempt, and I've never seen a failure - * > on the third attempt. + * > invalid function calling arguments about 50% of the time on the first + * > attempt. However, when corrected through this `validate` function, the + * > success rate jumps to 99% on the second attempt, and I've never seen a + * > failure on the third attempt. * * @param args Arguments to validate * @returns Validation result diff --git a/src/structures/ILlmApplication.ts b/src/structures/ILlmApplication.ts index 31a24b5c..de55f8f8 100644 --- a/src/structures/ILlmApplication.ts +++ b/src/structures/ILlmApplication.ts @@ -7,42 +7,36 @@ import { IValidation } from "./IValidation"; * * `ILlmApplication` is a data structure representing a collection of * {@link ILlmFunction LLM function calling schemas}, composed from a native - * TypeScript class (or interface) type by the `typia.llm.application()` function. + * TypeScript class (or interface) type by the `typia.llm.application()` + * function. * * Also, there can be some parameters (or their nested properties) which must be * composed by Human, not by LLM. File uploading feature or some sensitive * information like secret key (password) are the examples. In that case, you * can separate the function parameters to both LLM and human sides by - * configuring the {@link ILlmApplication.IOptions.separate} property. The + * configuring the {@link ILlmApplication.IConfig.separate} property. The * separated parameters are assigned to the {@link ILlmFunction.separated} * property. 
* * For reference, when both LLM and Human filled parameter values to call, you * can merge them by calling the {@link HttpLlm.mergeParameters} function. In - * other words, if you've configured the - * {@link ILlmApplication.IOptions.separate} property, you have to merge the - * separated parameters before the function call execution. + * other words, if you've configured the {@link ILlmApplication.IConfig.separate} + * property, you have to merge the separated parameters before the function call + * execution. * * @author Jeongho Nam - https://github.com/samchon * @reference https://platform.openai.com/docs/guides/function-calling */ -export interface ILlmApplication< - Model extends ILlmSchema.Model, - Class extends object = any, -> { - /** Model of the LLM. */ - model: Model; - +export interface ILlmApplication { /** * List of function metadata. * * List of function metadata that can be used for the LLM function call. */ - functions: ILlmFunction[]; + functions: ILlmFunction[]; /** Configuration for the application. */ - options: ILlmApplication.IOptions; + config: ILlmApplication.IConfig; /** * Class type, the source of the LLM application. @@ -53,11 +47,9 @@ export interface ILlmApplication< __class?: Class | undefined; } export namespace ILlmApplication { - /** Options for application composition. */ - export type IOptions< - Model extends ILlmSchema.Model, - Class extends object = any, - > = ILlmSchema.ModelConfig[Model] & { + /** Configuration for application composition. */ + export interface IConfig + extends ILlmSchema.IConfig { /** * Separator function for the parameters. * @@ -66,7 +58,7 @@ export namespace ILlmApplication { * understand the parameter. * * For example, if the parameter type has configured - * {@link IGeminiSchema.IString.contentMediaType} which indicates file + * {@link ILlmSchema.IString.contentMediaType} which indicates file * uploading, it must be composed by human, not by LLM (Large Language * Model). 
* @@ -80,13 +72,13 @@ export namespace ILlmApplication { * When writing the function, note that returning value `true` means to be a * human composing the value, and `false` means to LLM composing the value. * Also, when predicating the schema, it would better to utilize the - * {@link GeminiTypeChecker} like features. + * {@link LlmTypeChecker} like features. * * @default null * @param schema Schema to be separated. * @returns Whether the schema value must be composed by human or not. */ - separate?: null | ((schema: ILlmSchema.ModelSchema[Model]) => boolean); + separate: null | ((schema: ILlmSchema) => boolean); /** * Custom validation functions for specific class methods. @@ -112,8 +104,8 @@ export namespace ILlmApplication { * * @default null */ - validate?: null | Partial>; - }; + validate: null | Partial>; + } /** * Type for custom validation function hooks. diff --git a/src/structures/ILlmController.ts b/src/structures/ILlmController.ts index 0ca0fa7e..c1bb588a 100644 --- a/src/structures/ILlmController.ts +++ b/src/structures/ILlmController.ts @@ -1,5 +1,4 @@ import { ILlmApplication } from "./ILlmApplication"; -import { ILlmSchema } from "./ILlmSchema"; /** * Controller of LLM function calling. 
@@ -25,11 +24,11 @@ import { ILlmSchema } from "./ILlmSchema"; * model: "gpt-4o-mini", * }, * controllers: [ - * typia.llm.controller( + * typia.llm.controller( * "filesystem", * new ReactNativeFileSystem(), * ), - * typia.llm.controller( + * typia.llm.controller( * "gallery", * new ReactNativeGallery(), * ), @@ -48,15 +47,11 @@ import { ILlmSchema } from "./ILlmSchema"; * - {@link IMcpLlmController} for MCP * * @author Jeongho Nam - https://github.com/samchon - * @template Model Type of the LLM model * @template Class Class type of the function executor * @reference https://typia.io/docs/llm/controller/ * @reference https://wrtnlabs.io/agentica/docs/core/controller/typescript/ */ -export interface ILlmController< - Model extends ILlmSchema.Model, - Class extends object = any, -> { +export interface ILlmController { /** Protocol discriminator. */ protocol: "class"; @@ -64,7 +59,7 @@ export interface ILlmController< name: string; /** Application schema of function calling. */ - application: ILlmApplication; + application: ILlmApplication; /** * Executor of the class function. diff --git a/src/structures/ILlmFunction.ts b/src/structures/ILlmFunction.ts index be42158e..55e69030 100644 --- a/src/structures/ILlmFunction.ts +++ b/src/structures/ILlmFunction.ts @@ -21,10 +21,9 @@ import { IValidation } from "./IValidation"; * [`typia`](https://github.com/samchon/typia) library. * * @author Jeongho Nam - https://github.com/samchon - * @template Model Type of the LLM model * @reference https://platform.openai.com/docs/guides/function-calling */ -export interface ILlmFunction { +export interface ILlmFunction { /** * Representative name of the function. * @@ -33,14 +32,14 @@ export interface ILlmFunction { name: string; /** List of parameter types. */ - parameters: ILlmSchema.ModelParameters[Model]; + parameters: ILlmSchema.IParameters; /** * Collection of separated parameters. * - * Filled only when {@link ILlmApplication.IOptions.separate} is configured. 
+ * Filled only when {@link ILlmApplication.IConfig.separate} is configured. */ - separated?: ILlmFunction.ISeparated; + separated?: ILlmFunction.ISeparated; /** * Expected return type. @@ -48,7 +47,7 @@ export interface ILlmFunction { * If the function returns nothing (`void`), the `output` value would be * `undefined`. */ - output?: ILlmSchema.ModelSchema[Model]; + output?: ILlmSchema | undefined; /** * Description of the function. @@ -57,9 +56,9 @@ export interface ILlmFunction { * purpose of the function to LLMs (Large Language Models). LLMs use this * description to determine which function to call. * - * Also, when the LLM converses with the user, the `description` explains - * the function to the user. Therefore, the `description` property has the - * highest priority and should be carefully considered. + * Also, when the LLM converses with the user, the `description` explains the + * function to the user. Therefore, the `description` property has the highest + * priority and should be carefully considered. */ description?: string | undefined; @@ -112,17 +111,17 @@ export interface ILlmFunction { } export namespace ILlmFunction { /** Collection of separated parameters. */ - export interface ISeparated { + export interface ISeparated { /** * Parameters that would be composed by the LLM. * * Even though no property exists in the LLM side, the `llm` property would * have at least empty object type. */ - llm: ILlmSchema.ModelParameters[Model]; + llm: ILlmSchema.IParameters; /** Parameters that would be composed by the human. */ - human: ILlmSchema.ModelParameters[Model] | null; + human: ILlmSchema.IParameters | null; /** * Validate function of the separated arguments. 
diff --git a/src/structures/ILlmSchema.ts b/src/structures/ILlmSchema.ts index 9ecd0ea1..6d4b55e8 100644 --- a/src/structures/ILlmSchema.ts +++ b/src/structures/ILlmSchema.ts @@ -1,78 +1,464 @@ -import { IChatGptSchema } from "./IChatGptSchema"; -import { IClaudeSchema } from "./IClaudeSchema"; -import { IGeminiSchema } from "./IGeminiSchema"; -import { ILlmSchemaV3 } from "./ILlmSchemaV3"; -import { ILlmSchemaV3_1 } from "./ILlmSchemaV3_1"; +import { IJsonSchemaAttribute } from "./IJsonSchemaAttribute"; /** - * The schemas for the LLM function calling. + * Type schema info for LLM (Large Language Model) function calling. * - * `ILlmSchema` is a union type collecting every schema for LLM function calling. - * Select a proper schema type according to the LLM provider you're using. + * ## Overview * - * {@link IChatGptSchema} is designed for OpenAI models. It is fully compatible - * with OpenAI's strict mode, handling its restrictions by utilizing JSDoc tags - * in the `description` property to support full JSON schema specifications - * despite OpenAI's constraints. + * `ILlmSchema` is a type schema info for LLM function calling, designed to be + * compatible with multiple LLM providers while following the JSON schema + * specification. * - * {@link IClaudeSchema} is the most recommended option as it most closely follows - * the JSON schema standard with the most concise types and accurate expressions. - * Claude has no JSON schema specification restrictions, making it ideal when - * you're unsure about your AI model's requirements. + * ## Specification * - * {@link IGeminiSchema} is implemented according to the Gemini guide documentation. - * Prior to November 2025, it had severe limitations, but now supports nearly all - * JSON schema specifications. + * `ILlmSchema` basically follows the JSON schema definition of the OpenAPI v3.1 + * specification; {@link OpenApiV3_1.IJsonSchema}. 
* - * {@link ILlmSchemaV3} and {@link ILlmSchemaV3_1} are middle layer schemas for - * advanced users who need direct control over OpenAPI v3.0 or v3.1 specifications. + * However, it deviates from the standard JSON schema specification and omits + * many features to ensure compatibility across different LLM providers and + * their function calling requirements. + * + * ## Differences from OpenAPI v3.1 + * + * Here is the list of how `ILlmSchema` is different with the OpenAPI v3.1 JSON + * schema: + * + * - Decompose mixed type: {@link OpenApiV3_1.IJsonSchema.IMixed} + * - Resolve nullable property: + * {@link OpenApiV3_1.IJsonSchema.__ISignificant.nullable} + * - Tuple type is banned: {@link OpenApiV3_1.IJsonSchema.ITuple.prefixItems} + * - Constant type is banned: {@link OpenApiV3_1.IJsonSchema.IConstant} + * - Merge {@link OpenApiV3_1.IJsonSchema.IOneOf} to {@link ILlmSchema.IAnyOf} + * - Merge {@link OpenApiV3_1.IJsonSchema.IAllOf} to {@link ILlmSchema.IObject} + * - Merge {@link OpenApiV3_1.IJsonSchema.IRecursiveReference} to + * {@link ILlmSchema.IReference} + * + * ## Differences from OpenApi.IJsonSchema + * + * Compared to {@link OpenApi.IJsonSchema}, the emended JSON schema + * specification: + * + * - {@link ILlmSchema.IAnyOf} instead of {@link OpenApi.IJsonSchema.IOneOf} + * - {@link ILlmSchema.IParameters.$defs} instead of + * {@link OpenApi.IJsonSchema.IComponents.schemas} + * - Do not support {@link OpenApi.IJsonSchema.ITuple} type + * - {@link ILlmSchema.properties} and {@link ILlmSchema.required} are always + * defined + * + * ## Strict Mode + * + * When {@link ILlmSchema.IConfig.strict} mode is enabled, the schema + * transformation follows OpenAI's structured output requirements: + * + * - Every {@link ILlmSchema.IObject.additionalProperties} is forced to `false` + * - Every property in {@link ILlmSchema.IObject.properties} becomes + * {@link ILlmSchema.IObject.required} + * - All constraint properties are removed from the schema and moved to + * 
{@link IJsonSchemaAttribute.description} in a JSDoc-like format: + * + * - Numeric constraints: `minimum`, `maximum`, `exclusiveMinimum`, + * `exclusiveMaximum`, `multipleOf` + * - String constraints: `minLength`, `maxLength`, `pattern`, `format`, + * `contentMediaType` + * - Array constraints: `minItems`, `maxItems`, `uniqueItems` + * - Example: `@minimum 0`, `@maximum 100`, `@format uuid` * * @author Jeongho Nam - https://github.com/samchon - * @template Model Type of the LLM model - * @reference https://platform.openai.com/docs/guides/function-calling - * @reference https://platform.openai.com/docs/guides/structured-outputs */ -export type ILlmSchema = - ILlmSchema.ModelSchema[Model]; - +export type ILlmSchema = + | ILlmSchema.IBoolean + | ILlmSchema.IInteger + | ILlmSchema.INumber + | ILlmSchema.IString + | ILlmSchema.IArray + | ILlmSchema.IObject + | ILlmSchema.IReference + | ILlmSchema.IAnyOf + | ILlmSchema.INull + | ILlmSchema.IUnknown; export namespace ILlmSchema { - export type Model = "chatgpt" | "claude" | "gemini" | "3.0" | "3.1"; - export interface ModelConfig { - chatgpt: IChatGptSchema.IConfig; - claude: IClaudeSchema.IConfig; - gemini: IGeminiSchema.IConfig; - "3.0": ILlmSchemaV3.IConfig; - "3.1": ILlmSchemaV3_1.IConfig; + /** Configuration for the LLM schema composition. */ + export interface IConfig { + /** + * Whether to allow reference type in everywhere. + * + * If you configure this property to `false`, most of reference types + * represented by {@link ILlmSchema.IReference} would be escaped to a plain + * type unless recursive type comes. + * + * This is because some LLM models do not understand the reference type + * well, and even the modern version of LLM sometimes occur the + * hallucination. + * + * However, the reference type makes the schema size smaller, so that + * reduces the LLM token cost. Therefore, if you're using the modern version + * of LLM, and want to reduce the LLM token cost, you can configure this + * property to `true`. 
+ * + * @default true + */ + reference: boolean; + + /** + * Whether to apply the strict mode. + * + * If you configure this property to `true`, the LLM function calling does + * not allow optional properties and dynamic key typed properties in the + * {@link ILlmSchema.IObject} type. In other words, when strict mode is + * enabled, {@link ILlmSchema.IObject.additionalProperties} is fixed to + * `false`, and every property must be {@link ILlmSchema.IObject.required}. + * + * However, the strict mode actually shows lower performance in practice. If + * you utilize the {@link typia.validate} function and give its validation + * feedback to the LLM, the performance is much better than the strict + * mode. + * + * Therefore, I recommend you to just turn off the strict mode and utilize + * the {@link typia.validate} function instead. + * + * @default false + */ + strict: boolean; + } + + /** + * Type for function parameters. + * + * `ILlmSchema.IParameters` defines a function's parameters as a keyword + * object type, where each property represents a named parameter. + * + * It can also be used for structured output metadata to define the expected + * format of LLM responses. + */ + export interface IParameters extends Omit { + /** + * Collection of the named types. + * + * This record would be filled when {@link IConfig.reference} is `true`, or + * recursive type comes. + */ + $defs: Record; + + /** + * Additional properties information. + * + * The `additionalProperties` defines the type schema for additional + * properties that are not listed in the {@link properties}. + * + * By the way, it is not allowed at the parameters level. + */ + additionalProperties: false; + } + + /** Boolean type info. */ + export interface IBoolean extends IJsonSchemaAttribute.IBoolean { + /** Enumeration values. */ + enum?: Array; + + /** Default value. */ + default?: boolean; + } + + /** Integer type info. 
*/ + export interface IInteger extends IJsonSchemaAttribute.IInteger { + /** Enumeration values. */ + enum?: Array; + + /** + * Default value. + * + * @type int64 + */ + default?: number; + + /** + * Minimum value restriction. + * + * @type int64 + */ + minimum?: number; + + /** + * Maximum value restriction. + * + * @type int64 + */ + maximum?: number; + + /** + * Exclusive minimum value restriction. + * + * @type int64 + */ + exclusiveMinimum?: number; + + /** + * Exclusive maximum value restriction. + * + * @type int64 + */ + exclusiveMaximum?: number; + + /** + * Multiple of value restriction. + * + * @type uint64 + * @exclusiveMinimum 0 + */ + multipleOf?: number; + } + + /** Number (double) type info. */ + export interface INumber extends IJsonSchemaAttribute.INumber { + /** Enumeration values. */ + enum?: Array; + + /** Default value. */ + default?: number; + + /** Minimum value restriction. */ + minimum?: number; + + /** Maximum value restriction. */ + maximum?: number; + + /** Exclusive minimum value restriction. */ + exclusiveMinimum?: number; + + /** Exclusive maximum value restriction. */ + exclusiveMaximum?: number; + + /** + * Multiple of value restriction. + * + * @exclusiveMinimum 0 + */ + multipleOf?: number; + } + + /** String type info. */ + export interface IString extends IJsonSchemaAttribute.IString { + /** Enumeration values. */ + enum?: Array; + + /** Default value. */ + default?: string; + + /** Format restriction. */ + format?: + | "binary" + | "byte" + | "password" + | "regex" + | "uuid" + | "email" + | "hostname" + | "idn-email" + | "idn-hostname" + | "iri" + | "iri-reference" + | "ipv4" + | "ipv6" + | "uri" + | "uri-reference" + | "uri-template" + | "url" + | "date-time" + | "date" + | "time" + | "duration" + | "json-pointer" + | "relative-json-pointer" + | (string & {}); + + /** Pattern restriction. */ + pattern?: string; + + /** Content media type restriction. */ + contentMediaType?: string; + + /** + * Minimum length restriction. 
+ * + * @type uint64 + */ + minLength?: number; + + /** + * Maximum length restriction. + * + * @type uint64 + */ + maxLength?: number; } - export interface ModelParameters { - chatgpt: IChatGptSchema.IParameters; - claude: IClaudeSchema.IParameters; - gemini: IGeminiSchema.IParameters; - "3.0": ILlmSchemaV3.IParameters; - "3.1": ILlmSchemaV3_1.IParameters; + + /** Array type info. */ + export interface IArray extends IJsonSchemaAttribute.IArray { + /** + * Items type info. + * + * The `items` means the type of the array elements. In other words, it is + * the type schema info of the `T` in the TypeScript array type `Array`. + */ + items: ILlmSchema; + + /** + * Unique items restriction. + * + * If this property value is `true`, target array must have unique items. + */ + uniqueItems?: boolean; + + /** + * Minimum items restriction. + * + * Restriction of minimum number of items in the array. + * + * @type uint64 + */ + minItems?: number; + + /** + * Maximum items restriction. + * + * Restriction of maximum number of items in the array. + * + * @type uint64 + */ + maxItems?: number; } - export interface ModelSchema { - chatgpt: IChatGptSchema; - claude: IClaudeSchema; - gemini: IGeminiSchema; - "3.0": ILlmSchemaV3; - "3.1": ILlmSchemaV3_1; + + /** Object type info. */ + export interface IObject extends IJsonSchemaAttribute.IObject { + /** + * Properties of the object. + * + * The `properties` means a list of key-value pairs of the object's regular + * properties. The key is the name of the regular property, and the value is + * the type schema info. + */ + properties: Record; + + /** + * Additional properties' info. + * + * The `additionalProperties` means the type schema info of the additional + * properties that are not listed in the {@link properties}. + * + * If the value is `true`, it means that the additional properties are not + * restricted. They can be any type. 
Otherwise, if the value is + * {@link ILlmSchema} type, it means that the additional properties must + * follow the type schema info. + * + * - `true`: `Record` + * - `ILlmSchema`: `Record` + * + * Note: When {@link IConfig.strict} mode is enabled, this property is always + * fixed to `false`, meaning no additional properties are allowed. + */ + additionalProperties?: ILlmSchema | boolean; + + /** + * List of required property keys. + * + * The `required` contains a list of property keys from {@link properties} + * that must be provided. Properties not listed in `required` are optional, + * while those listed must be filled. + * + * Below is an example of {@link properties} and `required`: + * + * ```typescript + * interface SomeObject { + * id: string; + * email: string; + * name?: string; + * } + * ``` + * + * As you can see, `id` and `email` {@link properties} are {@link required}, + * so they are listed in the `required` array. + * + * ```json + * { + * "type": "object", + * "properties": { + * "id": { "type": "string" }, + * "email": { "type": "string" }, + * "name": { "type": "string" } + * }, + * "required": ["id", "email"] + * } + * ``` + */ + required: string[]; } /** - * Type of function parameters. + * Reference type directing to named schema. * - * `ILlmSchema.IParameters` is a type defining a function's parameters as a - * keyworded object type. + * If {@link IConfig.strict} mode is enabled, its other properties like + * {@link description} would be disabled. Instead, the description would be + * placed in the parent type. For example, if this reference type is used as a + * property of an object, the description would be placed in the object + * place. + */ + export interface IReference extends IJsonSchemaAttribute { + /** + * Reference to the named schema. + * + * The `$ref` is a reference to a named schema. The format follows the JSON + * Pointer specification. 
In OpenAPI, the `$ref` starts with `#/$defs/` + * which indicates the type is stored in the + * {@link ILlmSchema.IParameters.$defs} object. + * + * - `#/$defs/SomeObject` + * - `#/$defs/AnotherObject` + */ + $ref: string; + } + + /** + * Union type. * - * It also can be utilized for the structured output metadata. + * `IAnyOf` represents a union type in TypeScript (`A | B | C`). * - * @reference https://platform.openai.com/docs/guides/structured-outputs + * For reference, even if your Swagger (or OpenAPI) document defines `oneOf` + * instead of `anyOf`, {@link ILlmSchema} forcibly converts it to the + * `anyOf`-based {@link ILlmSchema.IAnyOf} type. */ - export type IParameters = - ILlmSchema.ModelParameters[Model]; + export interface IAnyOf extends IJsonSchemaAttribute { + /** List of the union types. */ + anyOf: Exclude[]; - /** Configuration for the LLM schema composition. */ - export type IConfig = - ILlmSchema.ModelConfig[Model]; + /** + * Discriminator info of the union type. + * + * This discriminator is used to determine which type in the union should be + * used based on the value of a specific property. + */ + "x-discriminator"?: IAnyOf.IDiscriminator; + } + export namespace IAnyOf { + /** Discriminator info of the union type. */ + export interface IDiscriminator { + /** Property name for the discriminator. */ + propertyName: string; + + /** + * Mapping of discriminator values to schema names. + * + * This property is valid only for {@link IReference} typed + * {@link IAnyOf.anyOf} elements. Therefore, the `key` of `mapping` is the + * discriminator value, and the `value` of `mapping` is the schema name + * like `#/components/schemas/SomeObject`. + */ + mapping?: Record; + } + } + + /** Null type. */ + export interface INull extends IJsonSchemaAttribute.INull {} + + /** Unknown, the `any` type. 
*/ + export interface IUnknown extends IJsonSchemaAttribute.IUnknown {} } diff --git a/src/structures/ILlmSchemaV3.ts b/src/structures/ILlmSchemaV3.ts deleted file mode 100644 index 8a41a3cf..00000000 --- a/src/structures/ILlmSchemaV3.ts +++ /dev/null @@ -1,410 +0,0 @@ -import { IJsonSchemaAttribute } from "./IJsonSchemaAttribute"; - -/** - * Type schema based on OpenAPI v3.0 for LLM function calling. - * - * `ILlmSchemaV3` is a type metadata for LLM (Large Language Model) function - * calling, based on the OpenAPI v3.0 specification. This type is not the final - * type for the LLM function calling, but the intermediate structure for the - * conversion to the final type like {@link IGeminiSchema}. - * - * `ILlmSchemaV3` basically follows the JSON schema definition of OpenAPI v3.0 - * specification; {@link OpenApiV3.IJsonSchema}. However, `ILlmSchemaV3` does not - * have the reference type; {@link OpenApiV3.IJsonSchema.IReference}. It's - * because the LLM cannot compose the reference typed arguments. If recursive - * type comes, its type would be repeated in - * {@link ILlmSchemaV3.IConfig.recursive} times. Otherwise you've configured it - * to `false`, the recursive types are not allowed. - * - * For reference, the OpenAPI v3.0 based JSON schema definition can't express - * the tuple array type. It has been supported since OpenAPI v3.1; - * {@link OpenApi.IJsonSchema.ITuple}. Therefore, it would better to avoid using - * the tuple array type in the LLM function calling. - * - * Also, if you configure {@link ILlmSchemaV3.IConfig.constraint} to `false`, - * these properties would be banned and written to the - * {@link ILlmSchemaV3.__IAttribute.description} property instead. It's because - * there are some LLM models which does not support the constraint properties. 
- * - * - {@link ILlmSchemaV3.INumber.minimum} - * - {@link ILlmSchemaV3.INumber.maximum} - * - {@link ILlmSchemaV3.INumber.multipleOf} - * - {@link ILlmSchemaV3.IString.minLength} - * - {@link ILlmSchemaV3.IString.maxLength} - * - {@link ILlmSchemaV3.IString.format} - * - {@link ILlmSchemaV3.IString.pattern} - * - {@link ILlmSchemaV3.IString.contentMediaType} - * - {@link ILlmSchemaV3.IArray.minItems} - * - {@link ILlmSchemaV3.IArray.maxItems} - * - {@link ILlmSchemaV3.IArray.unique} - * - * @author Jeongho Nam - https://github.com/samchon - * @reference https://platform.openai.com/docs/guides/function-calling - */ -export type ILlmSchemaV3 = - | ILlmSchemaV3.IBoolean - | ILlmSchemaV3.IInteger - | ILlmSchemaV3.INumber - | ILlmSchemaV3.IString - | ILlmSchemaV3.IArray - | ILlmSchemaV3.IObject - | ILlmSchemaV3.IUnknown - | ILlmSchemaV3.INullOnly - | ILlmSchemaV3.IOneOf; -export namespace ILlmSchemaV3 { - /** Configuration for OpenAPI v3.0 based LLM schema composition. */ - export interface IConfig { - /** - * Whether to allow constraint properties or not. - * - * If you configure this property to `false`, the schemas do not contain the - * constraint properties of below. Instead, below properties would be - * written to the {@link ILlmSchemaV3.__IAttribute.description} property as a - * comment string like `"@format uuid"`. - * - * This is because some LLM schema model like {@link IGeminiSchema} has - * banned such constraint, because their LLM cannot understand the - * constraint properties and occur the hallucination. - * - * Therefore, considering your LLM model's performance, capability, and the - * complexity of your parameter types, determine which is better, to allow - * the constraint properties or not. 
- * - * - {@link ILlmSchemaV3.INumber.minimum} - * - {@link ILlmSchemaV3.INumber.maximum} - * - {@link ILlmSchemaV3.INumber.multipleOf} - * - {@link ILlmSchemaV3.IString.minLength} - * - {@link ILlmSchemaV3.IString.maxLength} - * - {@link ILlmSchemaV3.IString.format} - * - {@link ILlmSchemaV3.IString.pattern} - * - {@link ILlmSchemaV3.IString.contentMediaType} - * - {@link ILlmSchemaV3.IString.default} - * - {@link ILlmSchemaV3.IArray.minItems} - * - {@link ILlmSchemaV3.IArray.maxItems} - * - {@link ILlmSchemaV3.IArray.unique} - * - * @default true - */ - constraint: boolean; - - /** - * Whether to allow recursive types or not. - * - * If allow, then how many times to repeat the recursive types. - * - * By the way, if the model is "chatgpt", the recursive types are always - * allowed without any limitation, due to it supports the reference type. - * - * @default 3 - */ - recursive: false | number; - } - - /** - * Type of the function parameters. - * - * `ILlmSchemaV3.IParameters` is a type defining a function's parameters as a - * keyworded object type. - * - * It also can be utilized for the structured output metadata. - * - * @reference https://platform.openai.com/docs/guides/structured-outputs - */ - export interface IParameters extends Omit { - /** - * Additional properties' info. - * - * The `additionalProperties` means the type schema info of the additional - * properties that are not listed in the {@link properties}. - * - * By the way, it is not allowed in the parameters level. - */ - additionalProperties: false; - } - - /** Boolean type schema info. */ - export interface IBoolean extends IJsonSchemaAttribute.IBoolean { - /** Whether to allow `null` value or not. */ - nullable?: boolean; - - /** Default value. */ - default?: boolean | null; - - /** Enumeration values. */ - enum?: Array; - } - - /** Integer type schema info. */ - export interface IInteger extends IJsonSchemaAttribute.IInteger { - /** Whether to allow `null` value or not. 
*/ - nullable?: boolean; - - /** - * Default value. - * - * @type int64 - */ - default?: number | null; - - /** - * Enumeration values. - * - * @type int64 - */ - enum?: Array; - - /** - * Minimum value restriction. - * - * @type int64 - */ - minimum?: number; - - /** - * Maximum value restriction. - * - * @type int64 - */ - maximum?: number; - - /** Exclusive minimum value restriction. */ - exclusiveMinimum?: number; - - /** Exclusive maximum value restriction. */ - exclusiveMaximum?: number; - - /** - * Multiple of value restriction. - * - * @type uint64 - * @exclusiveMinimum 0 - */ - multipleOf?: number; - } - - /** Number type schema info. */ - export interface INumber extends IJsonSchemaAttribute.INumber { - /** Whether to allow `null` value or not. */ - nullable?: boolean; - - /** Default value. */ - default?: number | null; - - /** Enumeration values. */ - enum?: Array; - - /** Minimum value restriction. */ - minimum?: number; - - /** Maximum value restriction. */ - maximum?: number; - - /** Exclusive minimum value restriction. */ - exclusiveMinimum?: number; - - /** Exclusive maximum value restriction. */ - exclusiveMaximum?: number; - - /** - * Multiple of value restriction. - * - * @exclusiveMinimum 0 - */ - multipleOf?: number; - } - - /** String type schema info. */ - export interface IString extends IJsonSchemaAttribute.IString { - /** Whether to allow `null` value or not. */ - nullable?: boolean; - - /** Default value. */ - default?: string | null; - - /** Enumeration values. */ - enum?: Array; - - /** Format restriction. */ - format?: - | "binary" - | "byte" - | "password" - | "regex" - | "uuid" - | "email" - | "hostname" - | "idn-email" - | "idn-hostname" - | "iri" - | "iri-reference" - | "ipv4" - | "ipv6" - | "uri" - | "uri-reference" - | "uri-template" - | "url" - | "date-time" - | "date" - | "time" - | "duration" - | "json-pointer" - | "relative-json-pointer" - | (string & {}); - - /** Pattern restriction. 
*/ - pattern?: string; - - /** - * Minimum length restriction. - * - * @type uint64 - */ - minLength?: number; - - /** - * Maximum length restriction. - * - * @type uint64 - */ - maxLength?: number; - - /** Content media type restriction. */ - contentMediaType?: string; - } - - /** Array type schema info. */ - export interface IArray extends IJsonSchemaAttribute.IArray { - /** Whether to allow `null` value or not. */ - nullable?: boolean; - - /** - * Items type schema info. - * - * The `items` means the type of the array elements. In other words, it is - * the type schema info of the `T` in the TypeScript array type `Array`. - */ - items: ILlmSchemaV3; - - /** - * Unique items restriction. - * - * If this property value is `true`, target array must have unique items. - */ - uniqueItems?: boolean; - - /** - * Minimum items restriction. - * - * Restriction of minimum number of items in the array. - * - * @type uint64 - */ - minItems?: number; - - /** - * Maximum items restriction. - * - * Restriction of maximum number of items in the array. - * - * @type uint64 - */ - maxItems?: number; - } - - /** Object type schema info. */ - export interface IObject extends IJsonSchemaAttribute.IObject { - /** Whether to allow `null` value or not. */ - nullable?: boolean; - - /** - * Properties of the object. - * - * The `properties` means a list of key-value pairs of the object's regular - * properties. The key is the name of the regular property, and the value is - * the type schema info. - * - * If you need additional properties that is represented by dynamic key, you - * can use the {@link additionalProperties} instead. - */ - properties: Record; - - /** - * List of key values of the required properties. - * - * The `required` means a list of the key values of the required - * {@link properties}. If some property key is not listed in the `required` - * list, it means that property is optional. 
Otherwise some property key - * exists in the `required` list, it means that the property must be - * filled. - * - * Below is an example of the {@link properties} and `required`. - * - * ```typescript - * interface SomeObject { - * id: string; - * email: string; - * name?: string; - * } - * ``` - * - * As you can see, `id` and `email` {@link properties} are {@link required}, - * so that they are listed in the `required` list. - * - * ```json - * { - * "type": "object", - * "properties": { - * "id": { "type": "string" }, - * "email": { "type": "string" }, - * "name": { "type": "string" } - * }, - * "required": ["id", "email"] - * } - * ``` - */ - required: string[]; - - /** - * Additional properties' info. - * - * The `additionalProperties` means the type schema info of the additional - * properties that are not listed in the {@link properties}. - * - * If the value is `true`, it means that the additional properties are not - * restricted. They can be any type. Otherwise, if the value is - * {@link ILlmSchemaV3} type, it means that the additional properties must - * follow the type schema info. - * - * - `true`: `Record` - * - `IOpenAiSchema`: `Record` - */ - additionalProperties?: boolean | ILlmSchemaV3; - } - - /** - * One of type schema info. - * - * `IOneOf` represents an union type of the TypeScript (`A | B | C`). - * - * For reference, even though your Swagger (or OpenAPI) document has defined - * `anyOf` instead of the `oneOf`, it has been forcibly converted to `oneOf` - * type by {@link OpenApi.convert OpenAPI conversion}. - */ - export interface IOneOf extends IJsonSchemaAttribute { - /** List of the union types. */ - oneOf: Exclude[]; - } - - /** Null only type schema info. */ - export interface INullOnly extends IJsonSchemaAttribute.INull { - /** Default value. */ - default?: null; - } - - /** - * Unknown type schema info. - * - * It means the type of the value is `any`. 
- */ - export interface IUnknown extends IJsonSchemaAttribute.IUnknown {} -} diff --git a/src/structures/ILlmSchemaV3_1.ts b/src/structures/ILlmSchemaV3_1.ts deleted file mode 100644 index 355c04e1..00000000 --- a/src/structures/ILlmSchemaV3_1.ts +++ /dev/null @@ -1,450 +0,0 @@ -import { IJsonSchemaAttribute } from "./IJsonSchemaAttribute"; - -/** - * Type schema based on OpenAPI v3.1 for LLM function calling. - * - * `ILlmSchemaV3_1` is type metadata for LLM (Large Language Model) function - * calling, based on the OpenAPI v3.1 specification. This type is not the final - * type for LLM function calling, but an intermediate structure for conversion - * to the final types: - * - * - {@link IChatGptSchema} - * - {@link IClaudeSchema} - * - {@link ILlamaSchema} - * - * However, `ILlmSchemaV3_1` does not follow the entire OpenAPI v3.1 - * specification. It has specific restrictions and definitions. Here are the - * differences between `ILlmSchemaV3_1` and the OpenAPI v3.1 JSON schema: - * - * - Decompose mixed type: {@link OpenApiV3_1.IJsonSchema.IMixed} - * - Resolve nullable property: - * {@link OpenApiV3_1.IJsonSchema.__ISignificant.nullable} - * - Constant type is banned: {@link OpenApiV3_1.IJsonSchema.IConstant} - * - Merge {@link OpenApiV3_1.IJsonSchema.IAnyOf} to {@link ILlmSchemaV3_1.IOneOf} - * - Merge {@link OpenApiV3_1.IJsonSchema.IAllOf} to {@link ILlmSchemaV3_1.IObject} - * - Merge {@link OpenApiV3_1.IJsonSchema.IRecursiveReference} to - * {@link ILlmSchemaV3_1.IReference} - * - Do not support {@link OpenApiV3_1.IJsonSchema.ITuple} type - * - * Compared to {@link OpenApi.IJsonSchema}, the emended JSON schema - * specification: - * - * - {@link ILlmSchemaV3_1.IParameters.$defs} instead of the - * {@link OpenApi.IJsonSchema.schemas} - * - Do not support {@link OpenApi.IJsonSchema.ITuple} type - * - {@link ILlmSchemaV3_1.properties} and {@link ILlmSchemaV3_1.required} are - * always defined - * - * For reference, if you've composed the `ILlmSchemaV3_1` type 
with the - * {@link ILlmSchemaV3_1.IConfig.reference} `false` option (default is `false`), - * only the recursived named types would be archived into the - * {@link ILlmSchemaV3_1.IParameters.$defs}, and the others would be ecaped from - * the {@link ILlmSchemaV3_1.IReference} type. - * - * Also, if you've composed the `ILlmSchemaV3_1` type with the - * {@link ILlmSchemaV3_1.IConfig.constraint} `false` option (default `false`), - * the `ILlmSchemaV3_1` would not compose these properties. Instead, these - * properties would be written on - * {@link ILlmSchemaV3_1.__IAttribute.descripotion} field like `@format uuid` - * case. - * - * - {@link ILlmSchemaV3_1.INumber.minimum} - * - {@link ILlmSchemaV3_1.INumber.maximum} - * - {@link ILlmSchemaV3_1.INumber.multipleOf} - * - {@link ILlmSchemaV3_1.IString.minLength} - * - {@link ILlmSchemaV3_1.IString.maxLength} - * - {@link ILlmSchemaV3_1.IString.format} - * - {@link ILlmSchemaV3_1.IString.pattern} - * - {@link ILlmSchemaV3_1.IString.contentMediaType} - * - {@link ILlmSchemaV3_1.IArray.minItems} - * - {@link ILlmSchemaV3_1.IArray.maxItems} - * - {@link ILlmSchemaV3_1.IArray.unique} - * - * @author Jeongho Nam - https://github.com/samchon - * @reference https://platform.openai.com/docs/guides/function-calling - * @reference https://platform.openai.com/docs/guides/structured-outputs - */ -export type ILlmSchemaV3_1 = - | ILlmSchemaV3_1.IConstant - | ILlmSchemaV3_1.IBoolean - | ILlmSchemaV3_1.IInteger - | ILlmSchemaV3_1.INumber - | ILlmSchemaV3_1.IString - | ILlmSchemaV3_1.IArray - | ILlmSchemaV3_1.IObject - | ILlmSchemaV3_1.IReference - | ILlmSchemaV3_1.IOneOf - | ILlmSchemaV3_1.INull - | ILlmSchemaV3_1.IUnknown; -export namespace ILlmSchemaV3_1 { - /** Configuration for OpenAPI v3.1 based LLM schema composition. */ - export interface IConfig { - /** - * Whether to allow constraint properties or not. - * - * If you configure this property to `false`, the schemas do not contain the - * constraint properties of below. 
Instead, below properties would be - * written to the {@link ILlmSchemaV3_1.__IAttribute.description} property as - * a comment string like `"@format uuid"`. - * - * This is because some LLM schema model like {@link IChatGptSchema} has - * banned such constraint, because their LLM cannot understand the - * constraint properties and occur the hallucination. - * - * Therefore, considering your LLM model's performance, capability, and the - * complexity of your parameter types, determine which is better, to allow - * the constraint properties or not. - * - * - {@link ILlmSchemaV3_1.INumber.minimum} - * - {@link ILlmSchemaV3_1.INumber.maximum} - * - {@link ILlmSchemaV3_1.INumber.multipleOf} - * - {@link ILlmSchemaV3_1.IString.minLength} - * - {@link ILlmSchemaV3_1.IString.maxLength} - * - {@link ILlmSchemaV3_1.IString.format} - * - {@link ILlmSchemaV3_1.IString.pattern} - * - {@link ILlmSchemaV3_1.IString.contentMediaType} - * - {@link ILlmSchemaV3_1.IString.default} - * - {@link ILlmSchemaV3_1.IArray.minItems} - * - {@link ILlmSchemaV3_1.IArray.maxItems} - * - {@link ILlmSchemaV3_1.IArray.unique} - * - * @default true - */ - constraint: boolean; - - /** - * Whether to allow reference type in everywhere. - * - * If you configure this property to `false`, most of reference types - * represented by {@link ILlmSchemaV3_1.IReference} would be escaped to a - * plain type unless recursive type case. - * - * This is because some low sized LLM models does not understand the - * reference type well, and even the large size LLM models sometimes occur - * the hallucination. - * - * However, the reference type makes the schema size smaller, so that - * reduces the LLM token cost. Therefore, if you're using the large size of - * LLM model, and want to reduce the LLM token cost, you can configure this - * property to `true`. - * - * @default true - */ - reference: boolean; - } - - /** - * Type of the function parameters. 
- * - * `ILlmSchemaV3_1.IParameters` is a type defining a function's parameters as - * a keyworded object type. - * - * It also can be utilized for the structured output metadata. - * - * @reference https://platform.openai.com/docs/guides/structured-outputs - */ - export interface IParameters extends Omit { - /** Collection of the named types. */ - $defs: Record; - - /** - * Additional properties' info. - * - * The `additionalProperties` means the type schema info of the additional - * properties that are not listed in the {@link properties}. - * - * By the way, it is not allowed in the parameters level. - */ - additionalProperties: false; - } - - /** Constant value type. */ - export interface IConstant extends IJsonSchemaAttribute { - /** The constant value. */ - const: boolean | number | string; - } - - /** Boolean type info. */ - export interface IBoolean extends IJsonSchemaAttribute.IBoolean { - /** The default value. */ - default?: boolean; - } - - /** Integer type info. */ - export interface IInteger extends IJsonSchemaAttribute.IInteger { - /** - * Default value. - * - * @type int64 - */ - default?: number; - - /** - * Minimum value restriction. - * - * @type int64 - */ - minimum?: number; - - /** - * Maximum value restriction. - * - * @type int64 - */ - maximum?: number; - - /** Exclusive minimum value restriction. */ - exclusiveMinimum?: number; - - /** Exclusive maximum value restriction. */ - exclusiveMaximum?: number; - - /** - * Multiple of value restriction. - * - * @type uint64 - * @exclusiveMinimum 0 - */ - multipleOf?: number; - } - - /** Number (double) type info. */ - export interface INumber extends IJsonSchemaAttribute.INumber { - /** Default value. */ - default?: number; - - /** Minimum value restriction. */ - minimum?: number; - - /** Maximum value restriction. */ - maximum?: number; - - /** Exclusive minimum value restriction. */ - exclusiveMinimum?: number; - - /** Exclusive maximum value restriction. 
*/ - exclusiveMaximum?: number; - - /** - * Multiple of value restriction. - * - * @exclusiveMinimum 0 - */ - multipleOf?: number; - } - - /** String type info. */ - export interface IString extends IJsonSchemaAttribute.IString { - /** Default value. */ - default?: string; - - /** Format restriction. */ - format?: - | "binary" - | "byte" - | "password" - | "regex" - | "uuid" - | "email" - | "hostname" - | "idn-email" - | "idn-hostname" - | "iri" - | "iri-reference" - | "ipv4" - | "ipv6" - | "uri" - | "uri-reference" - | "uri-template" - | "url" - | "date-time" - | "date" - | "time" - | "duration" - | "json-pointer" - | "relative-json-pointer" - | (string & {}); - - /** Pattern restriction. */ - pattern?: string; - - /** Content media type restriction. */ - contentMediaType?: string; - - /** - * Minimum length restriction. - * - * @type uint64 - */ - minLength?: number; - - /** - * Maximum length restriction. - * - * @type uint64 - */ - maxLength?: number; - } - - /** Array type info. */ - export interface IArray extends IJsonSchemaAttribute.IArray { - /** - * Items type info. - * - * The `items` means the type of the array elements. In other words, it is - * the type schema info of the `T` in the TypeScript array type `Array`. - */ - items: ILlmSchemaV3_1; - - /** - * Unique items restriction. - * - * If this property value is `true`, target array must have unique items. - */ - uniqueItems?: boolean; - - /** - * Minimum items restriction. - * - * Restriction of minimum number of items in the array. - * - * @type uint64 - */ - minItems?: number; - - /** - * Maximum items restriction. - * - * Restriction of maximum number of items in the array. - * - * @type uint64 - */ - maxItems?: number; - } - - /** Object type info. */ - export interface IObject extends IJsonSchemaAttribute.IObject { - /** - * Properties of the object. - * - * The `properties` means a list of key-value pairs of the object's regular - * properties. 
The key is the name of the regular property, and the value is - * the type schema info. - * - * If you need additional properties that is represented by dynamic key, you - * can use the {@link additionalProperties} instead. - */ - properties: Record; - - /** - * Additional properties' info. - * - * The `additionalProperties` means the type schema info of the additional - * properties that are not listed in the {@link properties}. - * - * If the value is `true`, it means that the additional properties are not - * restricted. They can be any type. Otherwise, if the value is - * {@link ILlmSchemaV3_1} type, it means that the additional properties must - * follow the type schema info. - * - * - `true`: `Record` - * - `ILlmSchemaV3_1`: `Record` - */ - additionalProperties?: boolean | ILlmSchemaV3_1; - - /** - * List of key values of the required properties. - * - * The `required` means a list of the key values of the required - * {@link properties}. If some property key is not listed in the `required` - * list, it means that property is optional. Otherwise some property key - * exists in the `required` list, it means that the property must be - * filled. - * - * Below is an example of the {@link properties} and `required`. - * - * ```typescript - * interface SomeObject { - * id: string; - * email: string; - * name?: string; - * } - * ``` - * - * As you can see, `id` and `email` {@link properties} are {@link required}, - * so that they are listed in the `required` list. - * - * ```json - * { - * "type": "object", - * "properties": { - * "id": { "type": "string" }, - * "email": { "type": "string" }, - * "name": { "type": "string" } - * }, - * "required": ["id", "email"] - * } - * ``` - */ - required: string[]; - } - - /** Reference type directing named schema. */ - export interface IReference extends IJsonSchemaAttribute { - /** - * Reference to the named schema. - * - * The `ref` is a reference to the named schema. 
Format of the `$ref` is - * following the JSON Pointer specification. In the OpenAPI, the `$ref` - * starts with `#/$defs/` which means the type is stored in the - * {@link ILlmSchemaV3_1.IParameters.$defs} object. - * - * - `#/$defs/SomeObject` - * - `#/$defs/AnotherObject` - */ - $ref: string; - } - - /** - * Union type. - * - * `IOneOf` represents an union type of the TypeScript (`A | B | C`). - * - * For reference, even though your Swagger (or OpenAPI) document has defined - * `anyOf` instead of the `oneOf`, {@link OpenApi} forcibly converts it to - * `oneOf` type. - */ - export interface IOneOf extends IJsonSchemaAttribute { - /** List of the union types. */ - oneOf: Exclude[]; - - /** Discriminator info of the union type. */ - discriminator?: IOneOf.IDiscriminator; - } - export namespace IOneOf { - /** Discriminator info of the union type. */ - export interface IDiscriminator { - /** Property name for the discriminator. */ - propertyName: string; - - /** - * Mapping of the discriminator value to the schema name. - * - * This property is valid only for {@link IReference} typed - * {@link IOneOf.oneof} elements. Therefore, `key` of `mapping` is the - * discriminator value, and `value` of `mapping` is the schema name like - * `#/components/schemas/SomeObject`. - */ - mapping?: Record; - } - } - - /** Null type. */ - export interface INull extends IJsonSchemaAttribute.INull { - /** Default value. */ - default?: null; - } - - /** Unknown, the `any` type. */ - export interface IUnknown extends IJsonSchemaAttribute.IUnknown {} -} diff --git a/src/structures/IMcpLlmApplication.ts b/src/structures/IMcpLlmApplication.ts index ca93d12e..fa191557 100644 --- a/src/structures/IMcpLlmApplication.ts +++ b/src/structures/IMcpLlmApplication.ts @@ -8,14 +8,14 @@ import { IMcpLlmFunction } from "./IMcpLlmFunction"; * {@link IMcpLlmFunction LLM function calling schemas} composed from the MCP * (Model Context Protocol) document. 
It contains * {@link IMcpLlmApplication.errors failed functions}, and adjusted - * {@link IMcpLlmApplication.options options} during the `IMcpLlmApplication` + * {@link IMcpLlmApplication.config options} during the `IMcpLlmApplication` * construction. * * About each function of MCP server, there can be {@link errors} during the - * composition, if the target {@link model} does not support the function's - * {@link IMcpLlmFunction.parameters} type. For example, Google Gemini model does - * not support union type, so that the function containing the union type would - * be placed into the {@link errors} list instead of {@link functions}. + * composition, if the function's {@link IMcpLlmFunction.parameters} type has + * some unsupported types for the LLM function calling. In that case, the + * function would be placed into the {@link errors} list instead of + * {@link functions}. * * Also, each function has its own {@link IMcpLlmFunction.validate} function for * correcting AI agent's mistakes, and this is the reason why `@samchon/openapi` @@ -27,42 +27,38 @@ import { IMcpLlmFunction } from "./IMcpLlmFunction"; * @author Jeongho Nam - https://github.com/samchon * @author Byeongjin Oh - https://github.com/sunrabbit123 */ -export interface IMcpLlmApplication { - /** Model of the target LLM. */ - model: Model; - +export interface IMcpLlmApplication { /** * List of function metadata. * * List of function metadata that can be used for the LLM function call. */ - functions: IMcpLlmFunction[]; + functions: IMcpLlmFunction[]; /** List of errors occurred during the composition. */ errors: IMcpLlmApplication.IError[]; /** Configuration for the application. */ - options: IMcpLlmApplication.IOptions; + config: IMcpLlmApplication.IConfig; } export namespace IMcpLlmApplication { - /** Options for the HTTP LLM application schema composition. */ - export type IOptions = - ILlmSchema.ModelConfig[Model] & { - /** - * Maximum length of function name. 
- * - * When a function name is longer than this value, it will be truncated. - * - * If not possible to truncate due to the duplication, the function name - * would be modified to randomly generated (UUID v4). - * - * @default 64 - */ - maxLength?: number; + /** Configuration for the HTTP LLM application schema composition. */ + export interface IConfig extends ILlmSchema.IConfig { + /** + * Maximum length of function name. + * + * When a function name is longer than this value, it will be truncated. + * + * If not possible to truncate due to the duplication, the function name + * would be modified to randomly generated (UUID v4). + * + * @default 64 + */ + maxLength: number; - /** Whether to disallow superfluous properties or not. */ - equals?: boolean; - }; + /** Whether to disallow superfluous properties or not. */ + equals: boolean; + } /** Error occurred in the composition. */ export interface IError { diff --git a/src/structures/IMcpLlmController.ts b/src/structures/IMcpLlmController.ts index 811c0510..78f1158d 100644 --- a/src/structures/IMcpLlmController.ts +++ b/src/structures/IMcpLlmController.ts @@ -1,4 +1,3 @@ -import { ILlmSchema } from "./ILlmSchema"; import { IMcpLlmApplication } from "./IMcpLlmApplication"; /** @@ -41,7 +40,6 @@ import { IMcpLlmApplication } from "./IMcpLlmApplication"; * controllers: [ * await assertMcpController({ * name: "calculator", - * model: "chatgpt", * client, * }), * ], @@ -60,7 +58,7 @@ import { IMcpLlmApplication } from "./IMcpLlmApplication"; * @author Byeongjin Oh - https://github.com/sunrabbit123 * @reference https://wrtnlabs.io/agentica/docs/core/controller/mcp/ */ -export interface IMcpLlmController { +export interface IMcpLlmController { /** Protocol discriminator. */ protocol: "mcp"; @@ -68,8 +66,7 @@ export interface IMcpLlmController { name: string; /** Application schema of function calling. */ - application: IMcpLlmApplication; - + application: IMcpLlmApplication; /** * MCP client for connection. 
* diff --git a/src/structures/IMcpLlmFunction.ts b/src/structures/IMcpLlmFunction.ts index 1c296392..12bb5af6 100644 --- a/src/structures/IMcpLlmFunction.ts +++ b/src/structures/IMcpLlmFunction.ts @@ -30,7 +30,7 @@ import { IValidation } from "./IValidation"; * @author Jeongho Nam - https://github.com/samchon * @author Byeongjin Oh - https://github.com/sunrabbit123 */ -export interface IMcpLlmFunction { +export interface IMcpLlmFunction { /** * Representative name of the function. * @@ -45,14 +45,14 @@ export interface IMcpLlmFunction { * purpose of the function to LLMs (Large Language Models). LLMs use this * description to determine which function to call. * - * Also, when the LLM converses with the user, the `description` explains - * the function to the user. Therefore, the `description` property has the - * highest priority and should be carefully considered. + * Also, when the LLM converses with the user, the `description` explains the + * function to the user. Therefore, the `description` property has the highest + * priority and should be carefully considered. */ description?: string | undefined; /** Parameters of the function. */ - parameters: ILlmSchema.IParameters; + parameters: ILlmSchema.IParameters; /** * Validate function of the arguments. @@ -72,14 +72,6 @@ export interface IMcpLlmFunction { * rate soars to 99% at the second trial, and I've never failed at the third * trial. * - * > If you've {@link separated} parameters, use the - * > {@link IMcpLlmFunction.ISeparated.validate} function instead when validating - * > the LLM composed arguments. - * - * > In that case, This `validate` function would be meaningful only when you've - * > merged the LLM and human composed arguments by - * > {@link McpLlm.mergeParameters} function. 
- * * @param args Arguments to validate * @returns Validation result */ diff --git a/src/utils/ChatGptTypeChecker.ts b/src/utils/ChatGptTypeChecker.ts deleted file mode 100644 index 21d70db4..00000000 --- a/src/utils/ChatGptTypeChecker.ts +++ /dev/null @@ -1,391 +0,0 @@ -import { IChatGptSchema } from "../structures/IChatGptSchema"; -import { MapUtil } from "./MapUtil"; - -/** - * Type checker for ChatGPT type schema. - * - * `ChatGptTypeChecker` is a type checker of {@link IChatGptSchema}. - * - * @author Jeongho Nam - https://github.com/samchon - */ -export namespace ChatGptTypeChecker { - /* ----------------------------------------------------------- - TYPE CHECKERS - ----------------------------------------------------------- */ - /** - * Test whether the schema is a null type. - * - * @param schema Target schema - * @returns Whether null type or not - */ - export const isNull = ( - schema: IChatGptSchema, - ): schema is IChatGptSchema.INull => - (schema as IChatGptSchema.INull).type === "null"; - - /** - * Test whether the schema is an unknown type. - * - * @param schema Target schema - * @returns Whether unknown type or not - */ - export const isUnknown = ( - schema: IChatGptSchema, - ): schema is IChatGptSchema.IUnknown => - (schema as IChatGptSchema.IUnknown).type === undefined && - !isAnyOf(schema) && - !isReference(schema); - - /** - * Test whether the schema is a boolean type. - * - * @param schema Target schema - * @returns Whether boolean type or not - */ - export const isBoolean = ( - schema: IChatGptSchema, - ): schema is IChatGptSchema.IBoolean => - (schema as IChatGptSchema.IBoolean).type === "boolean"; - - /** - * Test whether the schema is an integer type. - * - * @param schema Target schema - * @returns Whether integer type or not - */ - export const isInteger = ( - schema: IChatGptSchema, - ): schema is IChatGptSchema.IInteger => - (schema as IChatGptSchema.IInteger).type === "integer"; - - /** - * Test whether the schema is a number type. 
- * - * @param schema Target schema - * @returns Whether number type or not - */ - export const isNumber = ( - schema: IChatGptSchema, - ): schema is IChatGptSchema.INumber => - (schema as IChatGptSchema.INumber).type === "number"; - - /** - * Test whether the schema is a string type. - * - * @param schema Target schema - * @returns Whether string type or not - */ - export const isString = ( - schema: IChatGptSchema, - ): schema is IChatGptSchema.IString => - (schema as IChatGptSchema.IString).type === "string"; - - /** - * Test whether the schema is an array type. - * - * @param schema Target schema - * @returns Whether array type or not - */ - export const isArray = ( - schema: IChatGptSchema, - ): schema is IChatGptSchema.IArray => - (schema as IChatGptSchema.IArray).type === "array" && - (schema as IChatGptSchema.IArray).items !== undefined; - - /** - * Test whether the schema is an object type. - * - * @param schema Target schema - * @returns Whether object type or not - */ - export const isObject = ( - schema: IChatGptSchema, - ): schema is IChatGptSchema.IObject => - (schema as IChatGptSchema.IObject).type === "object"; - - /** - * Test whether the schema is a reference type. - * - * @param schema Target schema - * @returns Whether reference type or not - */ - export const isReference = ( - schema: IChatGptSchema, - ): schema is IChatGptSchema.IReference => (schema as any).$ref !== undefined; - - /** - * Test whether the schema is an union type. - * - * @param schema Target schema - * @returns Whether union type or not - */ - export const isAnyOf = ( - schema: IChatGptSchema, - ): schema is IChatGptSchema.IAnyOf => - (schema as IChatGptSchema.IAnyOf).anyOf !== undefined; - - /* ----------------------------------------------------------- - OPERATORS - ----------------------------------------------------------- */ - /** - * Visit every nested schemas. - * - * Visit every nested schemas of the target, and apply the `props.closure` - * function. 
- * - * Here is the list of occurring nested visitings: - * - * - {@link IChatGptSchema.IAnyOf.anyOf} - * - {@link IChatGptSchema.IReference} - * - {@link IChatGptSchema.IObject.properties} - * - {@link IChatGptSchema.IArray.items} - * - * @param props Properties for visiting - */ - export const visit = (props: { - closure: (schema: IChatGptSchema, accessor: string) => void; - $defs?: Record | undefined; - schema: IChatGptSchema; - accessor?: string; - refAccessor?: string; - }): void => { - const already: Set = new Set(); - const refAccessor: string = props.refAccessor ?? "$input.$defs"; - const next = (schema: IChatGptSchema, accessor: string): void => { - props.closure(schema, accessor); - if (ChatGptTypeChecker.isReference(schema)) { - const key: string = schema.$ref.split("#/$defs/").pop()!; - if (already.has(key) === true) return; - already.add(key); - const found: IChatGptSchema | undefined = props.$defs?.[key]; - if (found !== undefined) next(found, `${refAccessor}[${key}]`); - } else if (ChatGptTypeChecker.isAnyOf(schema)) - schema.anyOf.forEach((s, i) => next(s, `${accessor}.anyOf[${i}]`)); - else if (ChatGptTypeChecker.isObject(schema)) { - for (const [key, value] of Object.entries(schema.properties)) - next(value, `${accessor}.properties[${JSON.stringify(key)}]`); - if ( - typeof schema.additionalProperties === "object" && - schema.additionalProperties !== null - ) - next(schema.additionalProperties, `${accessor}.additionalProperties`); - } else if (ChatGptTypeChecker.isArray(schema)) - next(schema.items, `${accessor}.items`); - }; - next(props.schema, props.accessor ?? "$input.schemas"); - }; - - /** - * Test whether the `x` schema covers the `y` schema. 
- * - * @param props Properties for testing - * @returns Whether the `x` schema covers the `y` schema - */ - export const covers = (props: { - $defs?: Record | undefined; - x: IChatGptSchema; - y: IChatGptSchema; - }): boolean => - coverStation({ - $defs: props.$defs, - x: props.x, - y: props.y, - visited: new Map(), - }); - - const coverStation = (p: { - $defs?: Record | undefined; - visited: Map>; - x: IChatGptSchema; - y: IChatGptSchema; - }): boolean => { - const cache: boolean | undefined = p.visited.get(p.x)?.get(p.y); - if (cache !== undefined) return cache; - - // FOR RECURSIVE CASE - const nested: Map = MapUtil.take(p.visited)(p.x)( - () => new Map(), - ); - nested.set(p.y, true); - - // COMPUTE IT - const result: boolean = coverSchema(p); - nested.set(p.y, result); - return result; - }; - - const coverSchema = (p: { - $defs?: Record | undefined; - visited: Map>; - x: IChatGptSchema; - y: IChatGptSchema; - }): boolean => { - // CHECK EQUALITY - if (p.x === p.y) return true; - else if (isReference(p.x) && isReference(p.y) && p.x.$ref === p.y.$ref) - return true; - - // COMPARE WITH FLATTENING - const alpha: IChatGptSchema[] = flatSchema(p.$defs, p.x); - const beta: IChatGptSchema[] = flatSchema(p.$defs, p.y); - if (alpha.some((x) => isUnknown(x))) return true; - else if (beta.some((x) => isUnknown(x))) return false; - return beta.every((b) => - alpha.some((a) => - coverEscapedSchema({ - $defs: p.$defs, - visited: p.visited, - x: a, - y: b, - }), - ), - ); - }; - - const coverEscapedSchema = (p: { - $defs?: Record | undefined; - visited: Map>; - x: IChatGptSchema; - y: IChatGptSchema; - }): boolean => { - // CHECK EQUALITY - if (p.x === p.y) return true; - else if (isUnknown(p.x)) return true; - else if (isUnknown(p.y)) return false; - else if (isNull(p.x)) return isNull(p.y); - // ATOMIC CASE - else if (isBoolean(p.x)) return isBoolean(p.y) && coverBoolean(p.x, p.y); - else if (isInteger(p.x)) return isInteger(p.y) && coverInteger(p.x, p.y); - else if 
(isNumber(p.x)) return isNumber(p.y) && coverNumber(p.x, p.y); - else if (isString(p.x)) return isString(p.y) && coverString(p.x, p.y); - // INSTANCE CASE - else if (isArray(p.x)) - return ( - isArray(p.y) && - coverArray({ - $defs: p.$defs, - visited: p.visited, - x: p.x, - y: p.y, - }) - ); - else if (isObject(p.x)) - return ( - isObject(p.y) && - coverObject({ - $defs: p.$defs, - visited: p.visited, - x: p.x, - y: p.y, - }) - ); - else if (isReference(p.x)) return isReference(p.y) && p.x.$ref === p.y.$ref; - return false; - }; - - const coverArray = (p: { - $defs?: Record | undefined; - visited: Map>; - x: IChatGptSchema.IArray; - y: IChatGptSchema.IArray; - }): boolean => - coverStation({ - $defs: p.$defs, - visited: p.visited, - x: p.x.items, - y: p.y.items, - }); - - const coverObject = (p: { - $defs?: Record | undefined; - visited: Map>; - x: IChatGptSchema.IObject; - y: IChatGptSchema.IObject; - }): boolean => { - if (!p.x.additionalProperties && !!p.y.additionalProperties) return false; - else if ( - !!p.x.additionalProperties && - !!p.y.additionalProperties && - ((typeof p.x.additionalProperties === "object" && - p.y.additionalProperties === true) || - (typeof p.x.additionalProperties === "object" && - typeof p.y.additionalProperties === "object" && - !coverStation({ - $defs: p.$defs, - visited: p.visited, - x: p.x.additionalProperties, - y: p.y.additionalProperties, - }))) - ) - return false; - return Object.entries(p.y.properties ?? {}).every(([key, b]) => { - const a: IChatGptSchema | undefined = p.x.properties?.[key]; - if (a === undefined) return false; - else if ( - (p.x.required?.includes(key) ?? false) === true && - (p.y.required?.includes(key) ?? 
false) === false - ) - return false; - return coverStation({ - $defs: p.$defs, - visited: p.visited, - x: a, - y: b, - }); - }); - }; - - const coverBoolean = ( - x: IChatGptSchema.IBoolean, - y: IChatGptSchema.IBoolean, - ): boolean => { - if (!!x.enum?.length) - return !!y.enum?.length && y.enum.every((v) => x.enum!.includes(v)); - return true; - }; - - const coverInteger = ( - x: IChatGptSchema.IInteger, - y: IChatGptSchema.IInteger, - ): boolean => { - if (!!x.enum?.length) - return !!y.enum?.length && y.enum.every((v) => x.enum!.includes(v)); - return x.type === y.type; - }; - - const coverNumber = ( - x: IChatGptSchema.INumber, - y: IChatGptSchema.IInteger | IChatGptSchema.INumber, - ): boolean => { - if (!!x.enum?.length) - return !!y.enum?.length && y.enum.every((v) => x.enum!.includes(v)); - return x.type === y.type || (x.type === "number" && y.type === "integer"); - }; - - const coverString = ( - x: IChatGptSchema.IString, - y: IChatGptSchema.IString, - ): boolean => { - if (!!x.enum?.length) - return !!y.enum?.length && y.enum.every((v) => x.enum!.includes(v)); - return x.type === y.type; - }; - - const flatSchema = ( - $defs: Record | undefined, - schema: IChatGptSchema, - ): IChatGptSchema[] => { - schema = escapeReference($defs, schema); - if (isAnyOf(schema)) - return schema.anyOf.map((v) => flatSchema($defs, v)).flat(); - return [schema]; - }; - - const escapeReference = ( - $defs: Record | undefined, - schema: IChatGptSchema, - ): Exclude => - isReference(schema) - ? escapeReference($defs, $defs![schema.$ref.replace("#/$defs/", "")]!) - : schema; -} diff --git a/src/utils/ClaudeTypeChecker.ts b/src/utils/ClaudeTypeChecker.ts deleted file mode 100644 index 718e05f9..00000000 --- a/src/utils/ClaudeTypeChecker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { LlmTypeCheckerV3_1 } from "./LlmTypeCheckerV3_1"; - -/** - * Type checker for Claude type schema. 
- * - * `ClaudeTypeChecker` is an alias for {@link LlmTypeCheckerV3_1} since Claude - * supports the same schema structure as LLM Schema v3.1. - * - * @author Jeongho Nam - https://github.com/samchon - */ -export const ClaudeTypeChecker = LlmTypeCheckerV3_1; diff --git a/src/utils/DeepSeekTypeChecker.ts b/src/utils/DeepSeekTypeChecker.ts deleted file mode 100644 index b223dcf4..00000000 --- a/src/utils/DeepSeekTypeChecker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { LlmTypeCheckerV3_1 } from "./LlmTypeCheckerV3_1"; - -/** - * Type checker for DeepSeek type schema. - * - * `DeepSeekTypeChecker` is an alias for {@link LlmTypeCheckerV3_1} since DeepSeek - * supports the same schema structure as LLM Schema v3.1. - * - * @author Jeongho Nam - https://github.com/samchon - */ -export const DeepSeekTypeChecker = LlmTypeCheckerV3_1; diff --git a/src/utils/LlamaTypeChecker.ts b/src/utils/LlamaTypeChecker.ts deleted file mode 100644 index de791101..00000000 --- a/src/utils/LlamaTypeChecker.ts +++ /dev/null @@ -1,11 +0,0 @@ -import { LlmTypeCheckerV3_1 } from "./LlmTypeCheckerV3_1"; - -/** - * Type checker for Llama type schema. - * - * `LlamaTypeChecker` is an alias for {@link LlmTypeCheckerV3_1} since Llama - * supports the same schema structure as LLM Schema v3.1. - * - * @author Jeongho Nam - https://github.com/samchon - */ -export const LlamaTypeChecker = LlmTypeCheckerV3_1; diff --git a/src/utils/LlmDataMerger.ts b/src/utils/LlmDataMerger.ts index 5fc19eda..4145ca67 100644 --- a/src/utils/LlmDataMerger.ts +++ b/src/utils/LlmDataMerger.ts @@ -1,5 +1,4 @@ import { ILlmFunction } from "../structures/ILlmFunction"; -import { ILlmSchema } from "../structures/ILlmSchema"; /** * Data combiner for LLM function call. @@ -8,9 +7,9 @@ import { ILlmSchema } from "../structures/ILlmSchema"; */ export namespace LlmDataMerger { /** Properties of {@link parameters} function. */ - export interface IProps { + export interface IProps { /** Target function to call. 
*/ - function: ILlmFunction; + function: ILlmFunction; /** Arguments composed by LLM (Large Language Model). */ llm: object | null; @@ -22,25 +21,23 @@ export namespace LlmDataMerger { /** * Combine LLM and human arguments into one. * - * When you composes {@link IOpenAiDocument} with - * {@link IOpenAiDocument.IOptions.separate} option, then the arguments of the - * target function would be separated into two parts; LLM (Large Language + * When you compose {@link IHttpLlmApplication} with + * {@link IHttpLlmApplication.IConfig.separate} option, then the arguments of + * the target function would be separated into two parts; LLM (Large Language * Model) and human. * * In that case, you can combine both LLM and human composed arguments into * one by utilizing this {@link LlmDataMerger.parameters} function, referencing - * the target function metadata {@link IOpenAiFunction.separated}. + * the target function metadata {@link ILlmFunction.separated}. * * @param props Properties to combine LLM and human arguments with metadata. 
* @returns Combined arguments */ - export const parameters = ( - props: IProps, - ): object => { + export const parameters = (props: IProps): object => { const separated = props.function.separated; if (separated === undefined) throw new Error( - "Error on OpenAiDataComposer.parameters(): the function parameters are not separated.", + "Error on LlmDataMerger.parameters(): the function parameters are not separated.", ); return value(props.llm, props.human) as object; }; diff --git a/src/utils/GeminiTypeChecker.ts b/src/utils/LlmTypeChecker.ts similarity index 66% rename from src/utils/GeminiTypeChecker.ts rename to src/utils/LlmTypeChecker.ts index b1d1fe8d..3e819f1b 100644 --- a/src/utils/GeminiTypeChecker.ts +++ b/src/utils/LlmTypeChecker.ts @@ -1,15 +1,19 @@ -import { IGeminiSchema } from "../structures/IGeminiSchema"; +import { ILlmSchema } from "../structures/ILlmSchema"; import { MapUtil } from "./MapUtil"; import { OpenApiTypeCheckerBase } from "./internal/OpenApiTypeCheckerBase"; /** - * Type checker for Gemini type schema. + * Type checker for LLM function calling schema. * - * `GeminiTypeChecker` is a type checker of {@link IGeminiSchema}. + * `LlmTypeChecker` is a type checker of {@link ILlmSchema}, the type schema for + * LLM (Large Language Model) function calling. + * + * This checker provides type guard functions for validating schema types, and + * operators for traversing and comparing schemas. 
* * @author Jeongho Nam - https://github.com/samchon */ -export namespace GeminiTypeChecker { +export namespace LlmTypeChecker { /* ----------------------------------------------------------- TYPE CHECKERS ----------------------------------------------------------- */ @@ -19,10 +23,8 @@ export namespace GeminiTypeChecker { * @param schema Target schema * @returns Whether null type or not */ - export const isNull = ( - schema: IGeminiSchema, - ): schema is IGeminiSchema.INull => - (schema as IGeminiSchema.INull).type === "null"; + export const isNull = (schema: ILlmSchema): schema is ILlmSchema.INull => + (schema as ILlmSchema.INull).type === "null"; /** * Test whether the schema is an unknown type. @@ -31,9 +33,9 @@ export namespace GeminiTypeChecker { * @returns Whether unknown type or not */ export const isUnknown = ( - schema: IGeminiSchema, - ): schema is IGeminiSchema.IUnknown => - (schema as IGeminiSchema.IUnknown).type === undefined && + schema: ILlmSchema, + ): schema is ILlmSchema.IUnknown => + (schema as ILlmSchema.IUnknown).type === undefined && !isAnyOf(schema) && !isReference(schema); @@ -44,9 +46,9 @@ export namespace GeminiTypeChecker { * @returns Whether boolean type or not */ export const isBoolean = ( - schema: IGeminiSchema, - ): schema is IGeminiSchema.IBoolean => - (schema as IGeminiSchema.IBoolean).type === "boolean"; + schema: ILlmSchema, + ): schema is ILlmSchema.IBoolean => + (schema as ILlmSchema.IBoolean).type === "boolean"; /** * Test whether the schema is an integer type. @@ -55,9 +57,9 @@ export namespace GeminiTypeChecker { * @returns Whether integer type or not */ export const isInteger = ( - schema: IGeminiSchema, - ): schema is IGeminiSchema.IInteger => - (schema as IGeminiSchema.IInteger).type === "integer"; + schema: ILlmSchema, + ): schema is ILlmSchema.IInteger => + (schema as ILlmSchema.IInteger).type === "integer"; /** * Test whether the schema is a number type. 
@@ -65,10 +67,8 @@ export namespace GeminiTypeChecker { * @param schema Target schema * @returns Whether number type or not */ - export const isNumber = ( - schema: IGeminiSchema, - ): schema is IGeminiSchema.INumber => - (schema as IGeminiSchema.INumber).type === "number"; + export const isNumber = (schema: ILlmSchema): schema is ILlmSchema.INumber => + (schema as ILlmSchema.INumber).type === "number"; /** * Test whether the schema is a string type. @@ -76,10 +76,8 @@ export namespace GeminiTypeChecker { * @param schema Target schema * @returns Whether string type or not */ - export const isString = ( - schema: IGeminiSchema, - ): schema is IGeminiSchema.IString => - (schema as IGeminiSchema.IString).type === "string"; + export const isString = (schema: ILlmSchema): schema is ILlmSchema.IString => + (schema as ILlmSchema.IString).type === "string"; /** * Test whether the schema is an array type. @@ -87,11 +85,9 @@ export namespace GeminiTypeChecker { * @param schema Target schema * @returns Whether array type or not */ - export const isArray = ( - schema: IGeminiSchema, - ): schema is IGeminiSchema.IArray => - (schema as IGeminiSchema.IArray).type === "array" && - (schema as IGeminiSchema.IArray).items !== undefined; + export const isArray = (schema: ILlmSchema): schema is ILlmSchema.IArray => + (schema as ILlmSchema.IArray).type === "array" && + (schema as ILlmSchema.IArray).items !== undefined; /** * Test whether the schema is an object type. @@ -99,10 +95,8 @@ export namespace GeminiTypeChecker { * @param schema Target schema * @returns Whether object type or not */ - export const isObject = ( - schema: IGeminiSchema, - ): schema is IGeminiSchema.IObject => - (schema as IGeminiSchema.IObject).type === "object"; + export const isObject = (schema: ILlmSchema): schema is ILlmSchema.IObject => + (schema as ILlmSchema.IObject).type === "object"; /** * Test whether the schema is a reference type. 
@@ -111,19 +105,17 @@ export namespace GeminiTypeChecker { * @returns Whether reference type or not */ export const isReference = ( - schema: IGeminiSchema, - ): schema is IGeminiSchema.IReference => (schema as any).$ref !== undefined; + schema: ILlmSchema, + ): schema is ILlmSchema.IReference => (schema as any).$ref !== undefined; /** - * Test whether the schema is an union type. + * Test whether the schema is a union type. * * @param schema Target schema * @returns Whether union type or not */ - export const isAnyOf = ( - schema: IGeminiSchema, - ): schema is IGeminiSchema.IAnyOf => - (schema as IGeminiSchema.IAnyOf).anyOf !== undefined; + export const isAnyOf = (schema: ILlmSchema): schema is ILlmSchema.IAnyOf => + (schema as ILlmSchema.IAnyOf).anyOf !== undefined; /* ----------------------------------------------------------- OPERATORS @@ -136,33 +128,33 @@ export namespace GeminiTypeChecker { * * Here is the list of occurring nested visitings: * - * - {@link IGeminiSchema.IAnyOf.anyOf} - * - {@link IGeminiSchema.IReference} - * - {@link IGeminiSchema.IObject.properties} - * - {@link IGeminiSchema.IArray.items} + * - {@link ILlmSchema.IAnyOf.anyOf} + * - {@link ILlmSchema.IReference} + * - {@link ILlmSchema.IObject.properties} + * - {@link ILlmSchema.IArray.items} * * @param props Properties for visiting */ export const visit = (props: { - closure: (schema: IGeminiSchema, accessor: string) => void; - $defs?: Record | undefined; - schema: IGeminiSchema; + closure: (schema: ILlmSchema, accessor: string) => void; + $defs?: Record | undefined; + schema: ILlmSchema; accessor?: string; refAccessor?: string; }): void => { const already: Set = new Set(); const refAccessor: string = props.refAccessor ?? 
"$input.$defs"; - const next = (schema: IGeminiSchema, accessor: string): void => { + const next = (schema: ILlmSchema, accessor: string): void => { props.closure(schema, accessor); - if (GeminiTypeChecker.isReference(schema)) { + if (LlmTypeChecker.isReference(schema)) { const key: string = schema.$ref.split("#/$defs/").pop()!; if (already.has(key) === true) return; already.add(key); - const found: IGeminiSchema | undefined = props.$defs?.[key]; + const found: ILlmSchema | undefined = props.$defs?.[key]; if (found !== undefined) next(found, `${refAccessor}[${key}]`); - } else if (GeminiTypeChecker.isAnyOf(schema)) + } else if (LlmTypeChecker.isAnyOf(schema)) schema.anyOf.forEach((s, i) => next(s, `${accessor}.anyOf[${i}]`)); - else if (GeminiTypeChecker.isObject(schema)) { + else if (LlmTypeChecker.isObject(schema)) { for (const [key, value] of Object.entries(schema.properties)) next(value, `${accessor}.properties[${JSON.stringify(key)}]`); if ( @@ -170,7 +162,7 @@ export namespace GeminiTypeChecker { schema.additionalProperties !== null ) next(schema.additionalProperties, `${accessor}.additionalProperties`); - } else if (GeminiTypeChecker.isArray(schema)) + } else if (LlmTypeChecker.isArray(schema)) next(schema.items, `${accessor}.items`); }; next(props.schema, props.accessor ?? 
"$input.schemas"); @@ -183,9 +175,9 @@ export namespace GeminiTypeChecker { * @returns Whether the `x` schema covers the `y` schema */ export const covers = (props: { - $defs?: Record | undefined; - x: IGeminiSchema; - y: IGeminiSchema; + $defs?: Record | undefined; + x: ILlmSchema; + y: ILlmSchema; }): boolean => coverStation({ $defs: props.$defs, @@ -195,16 +187,16 @@ export namespace GeminiTypeChecker { }); const coverStation = (p: { - $defs?: Record | undefined; - visited: Map>; - x: IGeminiSchema; - y: IGeminiSchema; + $defs?: Record | undefined; + visited: Map>; + x: ILlmSchema; + y: ILlmSchema; }): boolean => { const cache: boolean | undefined = p.visited.get(p.x)?.get(p.y); if (cache !== undefined) return cache; // FOR RECURSIVE CASE - const nested: Map = MapUtil.take(p.visited)(p.x)( + const nested: Map = MapUtil.take(p.visited)(p.x)( () => new Map(), ); nested.set(p.y, true); @@ -216,10 +208,10 @@ export namespace GeminiTypeChecker { }; const coverSchema = (p: { - $defs?: Record | undefined; - visited: Map>; - x: IGeminiSchema; - y: IGeminiSchema; + $defs?: Record | undefined; + visited: Map>; + x: ILlmSchema; + y: ILlmSchema; }): boolean => { // CHECK EQUALITY if (p.x === p.y) return true; @@ -227,8 +219,8 @@ export namespace GeminiTypeChecker { return true; // COMPARE WITH FLATTENING - const alpha: IGeminiSchema[] = flatSchema(p.$defs, p.x); - const beta: IGeminiSchema[] = flatSchema(p.$defs, p.y); + const alpha: ILlmSchema[] = flatSchema(p.$defs, p.x); + const beta: ILlmSchema[] = flatSchema(p.$defs, p.y); if (alpha.some((x) => isUnknown(x))) return true; else if (beta.some((x) => isUnknown(x))) return false; return beta.every((b) => @@ -244,10 +236,10 @@ export namespace GeminiTypeChecker { }; const coverEscapedSchema = (p: { - $defs?: Record | undefined; - visited: Map>; - x: IGeminiSchema; - y: IGeminiSchema; + $defs?: Record | undefined; + visited: Map>; + x: ILlmSchema; + y: ILlmSchema; }): boolean => { // CHECK EQUALITY if (p.x === p.y) return 
true; @@ -285,10 +277,10 @@ export namespace GeminiTypeChecker { }; const coverArray = (p: { - $defs?: Record | undefined; - visited: Map>; - x: IGeminiSchema.IArray; - y: IGeminiSchema.IArray; + $defs?: Record | undefined; + visited: Map>; + x: ILlmSchema.IArray; + y: ILlmSchema.IArray; }): boolean => { if ( !( @@ -310,13 +302,13 @@ export namespace GeminiTypeChecker { x: p.x.items, y: p.y.items, }); - } + }; const coverObject = (p: { - $defs?: Record | undefined; - visited: Map>; - x: IGeminiSchema.IObject; - y: IGeminiSchema.IObject; + $defs?: Record | undefined; + visited: Map>; + x: ILlmSchema.IObject; + y: ILlmSchema.IObject; }): boolean => { if (!p.x.additionalProperties && !!p.y.additionalProperties) return false; else if ( @@ -335,7 +327,7 @@ export namespace GeminiTypeChecker { ) return false; return Object.entries(p.y.properties ?? {}).every(([key, b]) => { - const a: IGeminiSchema | undefined = p.x.properties?.[key]; + const a: ILlmSchema | undefined = p.x.properties?.[key]; if (a === undefined) return false; else if ( (p.x.required?.includes(key) ?? 
false) === true && @@ -352,8 +344,8 @@ export namespace GeminiTypeChecker { }; const coverBoolean = ( - x: IGeminiSchema.IBoolean, - y: IGeminiSchema.IBoolean, + x: ILlmSchema.IBoolean, + y: ILlmSchema.IBoolean, ): boolean => { if (!!x.enum?.length) return !!y.enum?.length && y.enum.every((v) => x.enum!.includes(v)); @@ -361,8 +353,8 @@ export namespace GeminiTypeChecker { }; const coverInteger = ( - x: IGeminiSchema.IInteger, - y: IGeminiSchema.IInteger, + x: ILlmSchema.IInteger, + y: ILlmSchema.IInteger, ): boolean => { if (!!x.enum?.length) return !!y.enum?.length && y.enum.every((v) => x.enum!.includes(v)); @@ -370,8 +362,8 @@ export namespace GeminiTypeChecker { }; const coverNumber = ( - x: IGeminiSchema.INumber, - y: IGeminiSchema.IInteger | IGeminiSchema.INumber, + x: ILlmSchema.INumber, + y: ILlmSchema.IInteger | ILlmSchema.INumber, ): boolean => { if (!!x.enum?.length) return !!y.enum?.length && y.enum.every((v) => x.enum!.includes(v)); @@ -379,8 +371,8 @@ export namespace GeminiTypeChecker { }; const coverString = ( - x: IGeminiSchema.IString, - y: IGeminiSchema.IString, + x: ILlmSchema.IString, + y: ILlmSchema.IString, ): boolean => { if (!!x.enum?.length) return !!y.enum?.length && y.enum.every((v) => x.enum!.includes(v)); @@ -388,9 +380,9 @@ export namespace GeminiTypeChecker { }; const flatSchema = ( - $defs: Record | undefined, - schema: IGeminiSchema, - ): IGeminiSchema[] => { + $defs: Record | undefined, + schema: ILlmSchema, + ): ILlmSchema[] => { schema = escapeReference($defs, schema); if (isAnyOf(schema)) return schema.anyOf.map((v) => flatSchema($defs, v)).flat(); @@ -398,9 +390,9 @@ export namespace GeminiTypeChecker { }; const escapeReference = ( - $defs: Record | undefined, - schema: IGeminiSchema, - ): Exclude => + $defs: Record | undefined, + schema: ILlmSchema, + ): Exclude => isReference(schema) ? escapeReference($defs, $defs![schema.$ref.replace("#/$defs/", "")]!) 
: schema; diff --git a/src/utils/LlmTypeCheckerV3.ts b/src/utils/LlmTypeCheckerV3.ts deleted file mode 100644 index dd05a7e3..00000000 --- a/src/utils/LlmTypeCheckerV3.ts +++ /dev/null @@ -1,307 +0,0 @@ -import { ILlmSchemaV3 } from "../structures/ILlmSchemaV3"; -import { OpenApiTypeCheckerBase } from "./internal/OpenApiTypeCheckerBase"; - -/** - * Type checker for LLM type schema v3. - * - * `LlmTypeCheckerV3` is a type checker of {@link ILlmSchemaV3}. - * - * @author Jeongho Nam - https://github.com/samchon - */ -export namespace LlmTypeCheckerV3 { - /* ----------------------------------------------------------- - OPERATORS - ----------------------------------------------------------- */ - /** - * Visit every nested schemas. - * - * Visit every nested schemas of the target, and apply the `props.closure` - * function. - * - * Here is the list of occurring nested visitings: - * - * - {@link ILlmSchemaV3.IOneOf.oneOf} - * - {@link ILlmSchemaV3.IObject.additionalProperties} - * - {@link ILlmSchemaV3.IArray.items} - * - * @param props Properties for visiting - */ - export const visit = (props: { - closure: (schema: ILlmSchemaV3, accessor: string) => void; - schema: ILlmSchemaV3; - accessor?: string; - }): void => { - const accessor: string = props.accessor ?? 
"$input.schema"; - props.closure(props.schema, accessor); - if (isOneOf(props.schema)) - props.schema.oneOf.forEach((s, i) => - visit({ - closure: props.closure, - schema: s, - accessor: `${accessor}.oneOf[${i}]`, - }), - ); - else if (isObject(props.schema)) { - for (const [k, s] of Object.entries(props.schema.properties)) - visit({ - closure: props.closure, - schema: s, - accessor: `${accessor}.properties[${JSON.stringify(k)}]`, - }); - if ( - typeof props.schema.additionalProperties === "object" && - props.schema.additionalProperties !== null - ) - visit({ - closure: props.closure, - schema: props.schema.additionalProperties, - accessor: `${accessor}.additionalProperties`, - }); - } else if (isArray(props.schema)) - visit({ - closure: props.closure, - schema: props.schema.items, - accessor: `${accessor}.items`, - }); - }; - - export const covers = (x: ILlmSchemaV3, y: ILlmSchemaV3): boolean => { - const alpha: ILlmSchemaV3[] = flatSchema(x); - const beta: ILlmSchemaV3[] = flatSchema(y); - if (alpha.some((x) => isUnknown(x))) return true; - else if (beta.some((x) => isUnknown(x))) return false; - return beta.every((b) => - alpha.some((a) => { - // CHECK EQUALITY - if (a === b) return true; - else if (isUnknown(a)) return true; - else if (isUnknown(b)) return false; - else if (isNullOnly(a)) return isNullOnly(b); - else if (isNullOnly(b)) return isNullable(a); - else if (isNullable(a) && !isNullable(b)) return false; - // ATOMIC CASE - else if (isBoolean(a)) return isBoolean(b) && coverBoolean(a, b); - else if (isInteger(a)) return isInteger(b) && coverInteger(a, b); - else if (isNumber(a)) - return (isNumber(b) || isInteger(b)) && coverNumber(a, b); - else if (isString(a)) return isString(b) && covertString(a, b); - // INSTANCE CASE - else if (isArray(a)) return isArray(b) && coverArray(a, b); - else if (isObject(a)) return isObject(b) && coverObject(a, b); - else if (isOneOf(a)) return false; - }), - ); - }; - - /** @internal */ - const coverBoolean = ( - x: 
ILlmSchemaV3.IBoolean, - y: ILlmSchemaV3.IBoolean, - ): boolean => - x.enum === undefined || - (y.enum !== undefined && x.enum.every((v) => y.enum!.includes(v))); - - /** @internal */ - const coverInteger = ( - x: ILlmSchemaV3.IInteger, - y: ILlmSchemaV3.IInteger, - ): boolean => { - if (x.enum !== undefined) - return y.enum !== undefined && x.enum.every((v) => y.enum!.includes(v)); - return x.type === y.type && OpenApiTypeCheckerBase.coverNumericRange(x, y); - }; - - /** @internal */ - const coverNumber = ( - x: ILlmSchemaV3.INumber, - y: ILlmSchemaV3.INumber | ILlmSchemaV3.IInteger, - ): boolean => { - if (x.enum !== undefined) - return y.enum !== undefined && x.enum.every((v) => y.enum!.includes(v)); - return ( - (x.type === y.type || (x.type === "number" && y.type === "integer")) && - OpenApiTypeCheckerBase.coverNumericRange(x, y) - ); - }; - - /** @internal */ - const covertString = ( - x: ILlmSchemaV3.IString, - y: ILlmSchemaV3.IString, - ): boolean => { - if (x.enum !== undefined) - return y.enum !== undefined && x.enum.every((v) => y.enum!.includes(v)); - return [ - x.type === y.type, - x.format === undefined || - (y.format !== undefined && coverFormat(x.format, y.format)), - x.pattern === undefined || x.pattern === y.pattern, - x.minLength === undefined || - (y.minLength !== undefined && x.minLength <= y.minLength), - x.maxLength === undefined || - (y.maxLength !== undefined && x.maxLength >= y.maxLength), - ].every((v) => v); - }; - - const coverFormat = ( - x: Required["format"], - y: Required["format"], - ): boolean => - x === y || - (x === "idn-email" && y === "email") || - (x === "idn-hostname" && y === "hostname") || - (["uri", "iri"].includes(x) && y === "url") || - (x === "iri" && y === "uri") || - (x === "iri-reference" && y === "uri-reference"); - - /** @internal */ - const coverArray = ( - x: ILlmSchemaV3.IArray, - y: ILlmSchemaV3.IArray, - ): boolean => covers(x.items, y.items); - - const coverObject = ( - x: ILlmSchemaV3.IObject, - y: 
ILlmSchemaV3.IObject, - ): boolean => { - if (!x.additionalProperties && !!y.additionalProperties) return false; - else if ( - (!!x.additionalProperties && - !!y.additionalProperties && - typeof x.additionalProperties === "object" && - y.additionalProperties === true) || - (typeof x.additionalProperties === "object" && - typeof y.additionalProperties === "object" && - !covers(x.additionalProperties, y.additionalProperties)) - ) - return false; - return Object.entries(y.properties ?? {}).every(([key, b]) => { - const a: ILlmSchemaV3 | undefined = x.properties?.[key]; - if (a === undefined) return false; - else if ( - (x.required?.includes(key) ?? false) === true && - (y.required?.includes(key) ?? false) === false - ) - return false; - return covers(a, b); - }); - }; - - const flatSchema = (schema: ILlmSchemaV3): ILlmSchemaV3[] => - isOneOf(schema) ? schema.oneOf.flatMap(flatSchema) : [schema]; - - /* ----------------------------------------------------------- - TYPE CHECKERS - ----------------------------------------------------------- */ - /** - * Test whether the schema is an union type. - * - * @param schema Target schema - * @returns Whether union type or not - */ - export const isOneOf = ( - schema: ILlmSchemaV3, - ): schema is ILlmSchemaV3.IOneOf => - (schema as ILlmSchemaV3.IOneOf).oneOf !== undefined; - - /** - * Test whether the schema is an object type. - * - * @param schema Target schema - * @returns Whether object type or not - */ - export const isObject = ( - schema: ILlmSchemaV3, - ): schema is ILlmSchemaV3.IObject => - (schema as ILlmSchemaV3.IObject).type === "object"; - - /** - * Test whether the schema is an array type. - * - * @param schema Target schema - * @returns Whether array type or not - */ - export const isArray = ( - schema: ILlmSchemaV3, - ): schema is ILlmSchemaV3.IArray => - (schema as ILlmSchemaV3.IArray).type === "array"; - - /** - * Test whether the schema is a boolean type. 
- * - * @param schema Target schema - * @returns Whether boolean type or not - */ - export const isBoolean = ( - schema: ILlmSchemaV3, - ): schema is ILlmSchemaV3.IBoolean => - (schema as ILlmSchemaV3.IBoolean).type === "boolean"; - - /** - * Test whether the schema is an integer type. - * - * @param schema Target schema - * @returns Whether integer type or not - */ - export const isInteger = ( - schema: ILlmSchemaV3, - ): schema is ILlmSchemaV3.IInteger => - (schema as ILlmSchemaV3.IInteger).type === "integer"; - - /** - * Test whether the schema is a number type. - * - * @param schema Target schema - * @returns Whether number type or not - */ - export const isNumber = ( - schema: ILlmSchemaV3, - ): schema is ILlmSchemaV3.INumber => - (schema as ILlmSchemaV3.INumber).type === "number"; - - /** - * Test whether the schema is a string type. - * - * @param schema Target schema - * @returns Whether string type or not - */ - export const isString = ( - schema: ILlmSchemaV3, - ): schema is ILlmSchemaV3.IString => - (schema as ILlmSchemaV3.IString).type === "string"; - - /** - * Test whether the schema is a null type. - * - * @param schema Target schema - * @returns Whether null type or not - */ - export const isNullOnly = ( - schema: ILlmSchemaV3, - ): schema is ILlmSchemaV3.INullOnly => - (schema as ILlmSchemaV3.INullOnly).type === "null"; - - /** - * Test whether the schema is a nullable type. - * - * @param schema Target schema - * @returns Whether nullable type or not - */ - export const isNullable = (schema: ILlmSchemaV3): boolean => - !isUnknown(schema) && - (isNullOnly(schema) || - (isOneOf(schema) - ? schema.oneOf.some(isNullable) - : schema.nullable === true)); - - /** - * Test whether the schema is an unknown type. 
- * - * @param schema Target schema - * @returns Whether unknown type or not - */ - export const isUnknown = ( - schema: ILlmSchemaV3, - ): schema is ILlmSchemaV3.IUnknown => - !isOneOf(schema) && (schema as ILlmSchemaV3.IUnknown).type === undefined; -} diff --git a/src/utils/LlmTypeCheckerV3_1.ts b/src/utils/LlmTypeCheckerV3_1.ts deleted file mode 100644 index 701fc3aa..00000000 --- a/src/utils/LlmTypeCheckerV3_1.ts +++ /dev/null @@ -1,206 +0,0 @@ -import { ILlmSchemaV3_1 } from "../structures/ILlmSchemaV3_1"; -import { OpenApiTypeCheckerBase } from "./internal/OpenApiTypeCheckerBase"; - -/** - * Type checker for LLM type schema v3.1. - * - * `LlmTypeCheckerV3_1` is a type checker of {@link ILlmSchemaV3_1}. - * - * @author Jeongho Nam - https://github.com/samchon - */ -export namespace LlmTypeCheckerV3_1 { - /* ----------------------------------------------------------- - TYPE CHECKERS - ----------------------------------------------------------- */ - /** - * Test whether the schema is a nul type. - * - * @param schema Target schema - * @returns Whether null type or not - */ - export const isNull = ( - schema: ILlmSchemaV3_1, - ): schema is ILlmSchemaV3_1.INull => OpenApiTypeCheckerBase.isNull(schema); - - /** - * Test whether the schema is an unknown type. - * - * @param schema Target schema - * @returns Whether unknown type or not - */ - export const isUnknown = ( - schema: ILlmSchemaV3_1, - ): schema is ILlmSchemaV3_1.IUnknown => - OpenApiTypeCheckerBase.isUnknown(schema); - - /** - * Test whether the schema is a constant type. - * - * @param schema Target schema - * @returns Whether constant type or not - */ - export const isConstant = ( - schema: ILlmSchemaV3_1, - ): schema is ILlmSchemaV3_1.IConstant => - OpenApiTypeCheckerBase.isConstant(schema); - - /** - * Test whether the schema is a boolean type. 
- * - * @param schema Target schema - * @returns Whether boolean type or not - */ - export const isBoolean = ( - schema: ILlmSchemaV3_1, - ): schema is ILlmSchemaV3_1.IBoolean => - OpenApiTypeCheckerBase.isBoolean(schema); - - /** - * Test whether the schema is an integer type. - * - * @param schema Target schema - * @returns Whether integer type or not - */ - export const isInteger = ( - schema: ILlmSchemaV3_1, - ): schema is ILlmSchemaV3_1.IInteger => - OpenApiTypeCheckerBase.isInteger(schema); - - /** - * Test whether the schema is a number type. - * - * @param schema Target schema - * @returns Whether number type or not - */ - export const isNumber = ( - schema: ILlmSchemaV3_1, - ): schema is ILlmSchemaV3_1.INumber => - OpenApiTypeCheckerBase.isNumber(schema); - - /** - * Test whether the schema is a string type. - * - * @param schema Target schema - * @returns Whether string type or not - */ - export const isString = ( - schema: ILlmSchemaV3_1, - ): schema is ILlmSchemaV3_1.IString => - OpenApiTypeCheckerBase.isString(schema); - - /** - * Test whether the schema is an array type. - * - * @param schema Target schema - * @returns Whether array type or not - */ - export const isArray = ( - schema: ILlmSchemaV3_1, - ): schema is ILlmSchemaV3_1.IArray => OpenApiTypeCheckerBase.isArray(schema); - - /** - * Test whether the schema is an object type. - * - * @param schema Target schema - * @returns Whether object type or not - */ - export const isObject = ( - schema: ILlmSchemaV3_1, - ): schema is ILlmSchemaV3_1.IObject => - OpenApiTypeCheckerBase.isObject(schema); - - /** - * Test whether the schema is a reference type. - * - * @param schema Target schema - * @returns Whether reference type or not - */ - export const isReference = ( - schema: ILlmSchemaV3_1, - ): schema is ILlmSchemaV3_1.IReference => - OpenApiTypeCheckerBase.isReference(schema); - - /** - * Test whether the schema is an union type. 
- * - * @param schema Target schema - * @returns Whether union type or not - */ - export const isOneOf = ( - schema: ILlmSchemaV3_1, - ): schema is ILlmSchemaV3_1.IOneOf => OpenApiTypeCheckerBase.isOneOf(schema); - - /** - * Test whether the schema is recursive reference type. - * - * Test whether the target schema is a reference type, and test one thing more - * that the reference is self-recursive or not. - * - * @param props Properties for recursive reference test - * @returns Whether the schema is recursive reference type or not - */ - export const isRecursiveReference = (props: { - $defs?: Record; - schema: ILlmSchemaV3_1; - }): boolean => - OpenApiTypeCheckerBase.isRecursiveReference({ - prefix: "#/$defs/", - components: { - schemas: props.$defs, - }, - schema: props.schema, - }); - - /* ----------------------------------------------------------- - OPERATORS - ----------------------------------------------------------- */ - /** - * Test whether the `x` schema covers the `y` schema. - * - * @param props Properties for testing - * @returns Whether the `x` schema covers the `y` schema - */ - export const covers = (props: { - $defs?: Record; - x: ILlmSchemaV3_1; - y: ILlmSchemaV3_1; - }): boolean => - OpenApiTypeCheckerBase.covers({ - prefix: "#/$defs/", - components: { - schemas: props.$defs, - }, - x: props.x, - y: props.y, - }); - - /** - * Visit every nested schemas. - * - * Visit every nested schemas of the target, and apply the `props.closure` - * function. 
- * - * Here is the list of occurring nested visitings: - * - * - {@link ILlmSchemaV3_1.IOneOf.oneOf} - * - {@link ILlmSchemaV3_1.IReference} - * - {@link ILlmSchemaV3_1.IObject.properties} - * - {@link ILlmSchemaV3_1.IObject.additionalProperties} - * - {@link ILlmSchemaV3_1.IArray.items} - * - * @param props Properties for visiting - */ - export const visit = (props: { - closure: (schema: ILlmSchemaV3_1, accessor: string) => void; - $defs?: Record; - schema: ILlmSchemaV3_1; - }): void => - OpenApiTypeCheckerBase.visit({ - prefix: "#/$defs/", - components: { - schemas: props.$defs, - }, - closure: props.closure as any, - schema: props.schema, - }); -} diff --git a/test/src/examples/chatgpt-function-call-to-sale-create.ts b/test/src/examples/chatgpt-function-call-to-sale-create.ts index 9824d31f..f89e32b2 100644 --- a/test/src/examples/chatgpt-function-call-to-sale-create.ts +++ b/test/src/examples/chatgpt-function-call-to-sale-create.ts @@ -25,17 +25,15 @@ const main = async (): Promise => { // convert to emended OpenAPI document, // and compose LLM function calling application const document: OpenApi.IDocument = OpenApi.convert(swagger); - const application: IHttpLlmApplication<"chatgpt"> = HttpLlm.application({ - model: "chatgpt", + const application: IHttpLlmApplication = HttpLlm.application({ document, }); // Let's imagine that LLM has selected a function to call - const func: IHttpLlmFunction<"chatgpt"> | undefined = - application.functions.find( - // (f) => f.name === "llm_selected_function_name" - (f) => f.path === "/shoppings/sellers/sale" && f.method === "post", - ); + const func: IHttpLlmFunction | undefined = application.functions.find( + // (f) => f.name === "llm_selected_function_name" + (f) => f.path === "/shoppings/sellers/sale" && f.method === "post", + ); if (func === undefined) throw new Error("No matched function exists."); // Get arguments by ChatGPT function calling diff --git 
a/test/src/examples/claude-function-call-separate-to-sale-create.ts.ts b/test/src/examples/claude-function-call-separate-to-sale-create.ts.ts index 8c0a7c2a..91cd6341 100644 --- a/test/src/examples/claude-function-call-separate-to-sale-create.ts.ts +++ b/test/src/examples/claude-function-call-separate-to-sale-create.ts.ts @@ -1,9 +1,9 @@ import Anthropic from "@anthropic-ai/sdk"; import { - ClaudeTypeChecker, HttpLlm, IHttpLlmApplication, IHttpLlmFunction, + LlmTypeChecker, OpenApi, OpenApiV3, OpenApiV3_1, @@ -26,23 +26,20 @@ const main = async (): Promise => { // convert to emended OpenAPI document, // and compose LLM function calling application const document: OpenApi.IDocument = OpenApi.convert(swagger); - const application: IHttpLlmApplication<"claude"> = HttpLlm.application({ - model: "claude", + const application: IHttpLlmApplication = HttpLlm.application({ document, - options: { - reference: true, + config: { separate: (schema) => - ClaudeTypeChecker.isString(schema) && + LlmTypeChecker.isString(schema) && !!schema.contentMediaType?.startsWith("image"), }, }); // Let's imagine that LLM has selected a function to call - const func: IHttpLlmFunction<"claude"> | undefined = - application.functions.find( - // (f) => f.name === "llm_selected_fuction_name" - (f) => f.path === "/shoppings/sellers/sale" && f.method === "post", - ); + const func: IHttpLlmFunction | undefined = application.functions.find( + // (f) => f.name === "llm_selected_fuction_name" + (f) => f.path === "/shoppings/sellers/sale" && f.method === "post", + ); if (func === undefined) throw new Error("No matched function exists."); // Get arguments by ChatGPT function calling diff --git a/test/src/executable/execute.ts b/test/src/executable/execute.ts index 3b83f3b2..c99d1a98 100644 --- a/test/src/executable/execute.ts +++ b/test/src/executable/execute.ts @@ -23,16 +23,15 @@ const main = async (): Promise => { // convert to emended OpenAPI document, // and compose LLM function calling application 
const document: OpenApi.IDocument = OpenApi.convert(swagger); - const application: IHttpLlmApplication<"3.0"> = HttpLlm.application({ - model: "3.0", + const application: IHttpLlmApplication = HttpLlm.application({ document, }); // Let's imagine that LLM has selected a function to call - const func: IHttpLlmFunction<"3.0"> | undefined = application.functions.find( + const func: IHttpLlmFunction | undefined = application.functions.find( (f) => f.path === "/bbs/articles" && f.method === "post", ); - typia.assertGuard>(func); + typia.assertGuard(func); // actual execution is by yourself const article = await HttpLlm.execute({ diff --git a/test/src/executable/sale.ts b/test/src/executable/sale.ts index 3c93b10e..9d33ca04 100644 --- a/test/src/executable/sale.ts +++ b/test/src/executable/sale.ts @@ -1,4 +1,3 @@ -import { ILlmSchema } from "@samchon/openapi"; import fs from "fs"; import typia from "typia"; @@ -8,32 +7,30 @@ import { LlmFunctionCaller } from "../utils/LlmFunctionCaller"; import { ShoppingSalePrompt } from "../utils/ShoppingSalePrompt"; import { StopWatch } from "../utils/StopWatch"; -const VENDORS: Array<[string, ILlmSchema.Model]> = [ - ["openai/gpt-4.1", "chatgpt"], - ["anthropic/claude-sonnet-4.5", "claude"], - ["deepseek/deepseek-v3.1-terminus:exacto", "claude"], - ["google/gemini-2.5-pro", "gemini"], - ["meta-llama/llama-3.3-70b-instruct", "claude"], - ["qwen/qwen3-next-80b-a3b-instruct", "claude"], +const VENDORS: string[] = [ + "openai/gpt-4.1", + "anthropic/claude-sonnet-4.5", + "deepseek/deepseek-v3.1-terminus:exacto", + "google/gemini-2.5-pro", + "meta-llama/llama-3.3-70b-instruct", + "qwen/qwen3-next-80b-a3b-instruct", ]; const main = async (): Promise => { for (const title of await ShoppingSalePrompt.documents()) - for (const [vendor, model] of VENDORS) + for (const vendor of VENDORS) try { const application = LlmApplicationFactory.convert({ - model, application: typia.json.application(), }); - await StopWatch.trace(`${title} - 
${model}`)(async () => + await StopWatch.trace(`${title} - ${vendor}`)(async () => LlmFunctionCaller.test({ vendor, - model, function: application.functions[0] as any, texts: await ShoppingSalePrompt.texts(title), handleCompletion: async (input) => { await fs.promises.writeFile( - `${TestGlobal.ROOT}/examples/function-calling/arguments/${model}.${title}.input.json`, + `${TestGlobal.ROOT}/examples/function-calling/arguments/llm.${title}.input.json`, JSON.stringify(input, null, 2), "utf8", ); @@ -43,7 +40,7 @@ const main = async (): Promise => { } catch (error) { console.log(title, " -> Error"); await fs.promises.writeFile( - `${TestGlobal.ROOT}/examples/function-calling/arguments/${model}.${title}.error.json`, + `${TestGlobal.ROOT}/examples/function-calling/arguments/llm.${title}.error.json`, JSON.stringify( typia.is(error) ? { ...error } : error, null, diff --git a/test/src/features/issues/test_issue_104_upgrade_v20_allOf.ts b/test/src/features/issues/test_issue_104_upgrade_v20_allOf.ts index 12ba87f2..0da4231a 100644 --- a/test/src/features/issues/test_issue_104_upgrade_v20_allOf.ts +++ b/test/src/features/issues/test_issue_104_upgrade_v20_allOf.ts @@ -36,8 +36,7 @@ export const test_issue_104_upgrade_v20_allOf = async (): Promise => { ); } - const app: IHttpLlmApplication<"claude"> = HttpLlm.application({ - model: "claude", + const app: IHttpLlmApplication = HttpLlm.application({ document, }); TestValidator.equals("errors")(app.errors.length)(0); diff --git a/test/src/features/issues/test_issue_127_enum_description.ts b/test/src/features/issues/test_issue_127_enum_description.ts index cbe4947a..574e0de8 100644 --- a/test/src/features/issues/test_issue_127_enum_description.ts +++ b/test/src/features/issues/test_issue_127_enum_description.ts @@ -5,8 +5,8 @@ import typia, { IJsonSchemaCollection } from "typia"; export const test_issue_127_enum_description = (): void => { const collection: IJsonSchemaCollection = typia.json.schemas<[ISomething]>(); - const chatgpt = 
LlmSchemaComposer.parameters("chatgpt")({ - config: LlmSchemaComposer.defaultConfig("chatgpt"), + const chatgpt = LlmSchemaComposer.parameters({ + config: LlmSchemaComposer.getConfig(), components: collection.components, schema: collection.schemas[0] as OpenApi.IJsonSchema.IReference, }); @@ -17,8 +17,8 @@ export const test_issue_127_enum_description = (): void => { : "", )("The description."); - const gemini = LlmSchemaComposer.parameters("gemini")({ - config: LlmSchemaComposer.defaultConfig("gemini"), + const gemini = LlmSchemaComposer.parameters({ + config: LlmSchemaComposer.getConfig(), components: collection.components, schema: collection.schemas[0] as OpenApi.IJsonSchema.IReference, }); diff --git a/test/src/features/llm/application/validate_llm_applicationEquals.ts b/test/src/features/llm/application/test_llm_applicationEquals.ts similarity index 56% rename from test/src/features/llm/application/validate_llm_applicationEquals.ts rename to test/src/features/llm/application/test_llm_applicationEquals.ts index f384a128..126333ef 100644 --- a/test/src/features/llm/application/validate_llm_applicationEquals.ts +++ b/test/src/features/llm/application/test_llm_applicationEquals.ts @@ -3,39 +3,18 @@ import { HttpLlm, IHttpLlmApplication, IHttpLlmFunction, - ILlmSchema, IValidation, OpenApi, } from "@samchon/openapi"; -import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; -export const test_chatgpt_applicationEquals = () => - validate_llm_applicationEquals("chatgpt"); - -export const test_claude_applicationEquals = () => - validate_llm_applicationEquals("claude"); - -export const test_gemini_applicationEquals = () => - validate_llm_applicationEquals("gemini"); - -export const test_llm_v30_applicationEquals = () => - validate_llm_applicationEquals("3.0"); - -export const test_llm_v31_applicationEquals = () => - validate_llm_applicationEquals("3.1"); - -const validate_llm_applicationEquals = ( - model: Model, -): void => { - const 
application: IHttpLlmApplication = HttpLlm.application({ - model, +export const test_llm_applicationEquals = (): void => { + const application: IHttpLlmApplication = HttpLlm.application({ document, - options: { - ...LlmSchemaComposer.defaultConfig(model), + config: { equals: true, - } as any, + }, }); - const func: IHttpLlmFunction = application.functions[0]; + const func: IHttpLlmFunction = application.functions[0]; const result: IValidation = func.validate({ body: { value: 1, diff --git a/test/src/features/llm/application/validate_llm_application_mismatch.ts b/test/src/features/llm/application/test_llm_application_mismatch.ts similarity index 70% rename from test/src/features/llm/application/validate_llm_application_mismatch.ts rename to test/src/features/llm/application/test_llm_application_mismatch.ts index 1ca01442..10be5373 100644 --- a/test/src/features/llm/application/validate_llm_application_mismatch.ts +++ b/test/src/features/llm/application/test_llm_application_mismatch.ts @@ -1,30 +1,8 @@ import { TestValidator } from "@nestia/e2e"; -import { - HttpLlm, - IHttpLlmApplication, - ILlmSchema, - OpenApi, -} from "@samchon/openapi"; +import { HttpLlm, IHttpLlmApplication, OpenApi } from "@samchon/openapi"; import typia, { IJsonSchemaCollection } from "typia"; -export const test_chatgpt_application_mismatch = (): void => - validate_llm_application_mismatch("chatgpt"); - -export const test_claude_application_mismatch = (): void => - validate_llm_application_mismatch("claude"); - -export const test_gemini_application_mismatch = (): void => - validate_llm_application_mismatch("gemini"); - -export const test_llm_v30_application_mismatch = (): void => - validate_llm_application_mismatch("3.0"); - -export const test_llm_v31_application_mismatch = (): void => - validate_llm_application_mismatch("3.1"); - -const validate_llm_application_mismatch = ( - model: Model, -): void => { +export const test_llm_application_mismatch = (): void => { const collection: 
IJsonSchemaCollection = typia.json.schemas<[IPoint, ICircle, IRectangle]>(); collection.schemas[0] = { $ref: "#/components/schemas/IPoint1" }; @@ -72,8 +50,7 @@ const validate_llm_application_mismatch = ( "x-samchon-emended-v4": true, }; - const app: IHttpLlmApplication = HttpLlm.application({ - model, + const app: IHttpLlmApplication = HttpLlm.application({ document, }); TestValidator.equals("#success")(app.functions.length)(0); diff --git a/test/src/features/llm/application/test_llm_application_separate.ts b/test/src/features/llm/application/test_llm_application_separate.ts new file mode 100644 index 00000000..440af47b --- /dev/null +++ b/test/src/features/llm/application/test_llm_application_separate.ts @@ -0,0 +1,35 @@ +import { TestValidator } from "@nestia/e2e"; +import { + HttpLlm, + IHttpLlmApplication, + LlmTypeChecker, + OpenApi, + OpenApiV3, + OpenApiV3_1, + SwaggerV2, +} from "@samchon/openapi"; +import { Singleton } from "tstl"; +import typia from "typia"; + +export const test_llm_application_separate = async (): Promise => { + const application: IHttpLlmApplication = HttpLlm.application({ + document: await document.get(), + config: { + separate: (schema) => + LlmTypeChecker.isString(schema as any) && + (schema as any)["x-wrtn-secret-key"] !== undefined, + }, + }); + for (const func of application.functions) + TestValidator.equals("separated")(!!func.separated)(true); +}; + +const document = new Singleton(async (): Promise => { + const swagger: + | SwaggerV2.IDocument + | OpenApiV3.IDocument + | OpenApiV3_1.IDocument = await fetch( + "https://wrtnlabs.github.io/connectors/swagger/swagger.json", + ).then((r) => r.json()); + return OpenApi.convert(typia.assert(swagger)); +}); diff --git a/test/src/features/llm/application/validate_llm_application_separateEquals.ts b/test/src/features/llm/application/test_llm_application_separateEquals.ts similarity index 57% rename from test/src/features/llm/application/validate_llm_application_separateEquals.ts rename 
to test/src/features/llm/application/test_llm_application_separateEquals.ts index b039957a..c8085e9a 100644 --- a/test/src/features/llm/application/validate_llm_application_separateEquals.ts +++ b/test/src/features/llm/application/test_llm_application_separateEquals.ts @@ -3,44 +3,21 @@ import { HttpLlm, IHttpLlmApplication, IHttpLlmFunction, - ILlmSchema, IValidation, OpenApi, OpenApiTypeChecker, } from "@samchon/openapi"; -import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; -export const test_chatgpt_application_separateEquals = () => - validate_llm_application_separateEquals("chatgpt"); - -export const test_claude_application_separateEquals = () => - validate_llm_application_separateEquals("claude"); - -export const test_gemini_application_separateEquals = () => - validate_llm_application_separateEquals("gemini"); - -export const test_llm_v30_application_separateEquals = () => - validate_llm_application_separateEquals("3.0"); - -export const test_llm_v31_application_separateEquals = () => - validate_llm_application_separateEquals("3.1"); - -const validate_llm_application_separateEquals = < - Model extends ILlmSchema.Model, ->( - model: Model, -): void => { - const application: IHttpLlmApplication = HttpLlm.application({ - model, +export const test_llm_application_separateEquals = (): void => { + const application: IHttpLlmApplication = HttpLlm.application({ document, - options: { - ...LlmSchemaComposer.defaultConfig(model), + config: { equals: true, separate: (schema: OpenApi.IJsonSchema) => OpenApiTypeChecker.isNumber(schema), - } as any, + }, }); - const func: IHttpLlmFunction = application.functions[0]; + const func: IHttpLlmFunction = application.functions[0]; const result: IValidation = func.separated!.validate!({ body: { name: "John Doe", diff --git a/test/src/features/llm/application/validate_llm_application_tuple.ts b/test/src/features/llm/application/test_llm_application_tuple.ts similarity index 75% rename from 
test/src/features/llm/application/validate_llm_application_tuple.ts rename to test/src/features/llm/application/test_llm_application_tuple.ts index 08afdb15..3ae6ea10 100644 --- a/test/src/features/llm/application/validate_llm_application_tuple.ts +++ b/test/src/features/llm/application/test_llm_application_tuple.ts @@ -1,30 +1,8 @@ import { TestValidator } from "@nestia/e2e"; -import { - HttpLlm, - IHttpLlmApplication, - ILlmSchema, - OpenApi, -} from "@samchon/openapi"; +import { HttpLlm, IHttpLlmApplication, OpenApi } from "@samchon/openapi"; import typia, { IJsonSchemaCollection } from "typia"; -export const test_chatgpt_application_mismatch = (): void => - validate_llm_application_tuple("chatgpt"); - -export const test_claude_application_mismatch = (): void => - validate_llm_application_tuple("claude"); - -export const test_gemini_application_mismatch = (): void => - validate_llm_application_tuple("gemini"); - -export const test_llm_v30_application_mismatch = (): void => - validate_llm_application_tuple("3.0"); - -export const test_llm_v31_application_mismatch = (): void => - validate_llm_application_tuple("3.1"); - -const validate_llm_application_tuple = ( - model: Model, -): void => { +export const test_llm_application_tuple = (): void => { const collection: IJsonSchemaCollection = typia.json.schemas< [ [number, number], @@ -97,8 +75,7 @@ const validate_llm_application_tuple = ( }, "x-samchon-emended-v4": true, }; - const app: IHttpLlmApplication = HttpLlm.application({ - model, + const app: IHttpLlmApplication = HttpLlm.application({ document, }); diff --git a/test/src/features/llm/application/test_llm_application_type.ts b/test/src/features/llm/application/test_llm_application_type.ts new file mode 100644 index 00000000..90f00216 --- /dev/null +++ b/test/src/features/llm/application/test_llm_application_type.ts @@ -0,0 +1,30 @@ +import { + HttpLlm, + IHttpLlmApplication, + ILlmApplication, + OpenApi, +} from "@samchon/openapi"; +import fs from "fs"; 
+import { Singleton } from "tstl"; +import typia from "typia"; + +import { TestGlobal } from "../../../TestGlobal"; + +export const test_llm_application_type = (): void => { + const http: IHttpLlmApplication = application(); + const classic: Omit = http; + typia.assert(classic); +}; + +const application = () => + HttpLlm.application({ + document: document.get(), + }); + +const document = new Singleton(() => + OpenApi.convert( + JSON.parse( + fs.readFileSync(`${TestGlobal.ROOT}/examples/v3.1/shopping.json`, "utf8"), + ), + ), +); diff --git a/test/src/features/llm/application/validate_llm_application_separate.ts b/test/src/features/llm/application/validate_llm_application_separate.ts deleted file mode 100644 index bb477c90..00000000 --- a/test/src/features/llm/application/validate_llm_application_separate.ts +++ /dev/null @@ -1,67 +0,0 @@ -import { TestValidator } from "@nestia/e2e"; -import { - HttpLlm, - IHttpLlmApplication, - ILlmSchema, - OpenApi, - OpenApiV3, - OpenApiV3_1, - SwaggerV2, -} from "@samchon/openapi"; -import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; -import { Singleton } from "tstl"; -import typia from "typia"; - -export const test_chatgpt_application_separate = async (): Promise => { - await validate_llm_application_separate("chatgpt", false); - await validate_llm_application_separate("chatgpt", true); -}; - -export const test_claude_application_separate = async (): Promise => { - await validate_llm_application_separate("claude", false); - await validate_llm_application_separate("claude", true); -}; - -export const test_gemini_application_separate = async (): Promise => { - await validate_llm_application_separate("gemini", false); -}; - -export const test_llm_v30_application_separate = async (): Promise => { - await validate_llm_application_separate("3.0", false); - await validate_llm_application_separate("3.0", true); -}; - -export const test_llm_v31_application_separate = async (): Promise => { - await 
validate_llm_application_separate("3.1", false); - await validate_llm_application_separate("3.1", true); -}; - -const validate_llm_application_separate = async < - Model extends ILlmSchema.Model, ->( - model: Model, - constraint: boolean, -): Promise => { - const application: IHttpLlmApplication = HttpLlm.application({ - model, - document: await document.get(), - options: { - separate: (schema: any) => - LlmSchemaComposer.typeChecker(model).isString(schema as any) && - (schema as any)["x-wrtn-secret-key"] !== undefined, - constraint: constraint as any, - } as any, - }); - for (const func of application.functions) - TestValidator.equals("separated")(!!func.separated)(true); -}; - -const document = new Singleton(async (): Promise => { - const swagger: - | SwaggerV2.IDocument - | OpenApiV3.IDocument - | OpenApiV3_1.IDocument = await fetch( - "https://wrtnlabs.github.io/connectors/swagger/swagger.json", - ).then((r) => r.json()); - return OpenApi.convert(typia.assert(swagger)); -}); diff --git a/test/src/features/llm/application/validate_llm_application_type.ts b/test/src/features/llm/application/validate_llm_application_type.ts deleted file mode 100644 index 7a9e7d41..00000000 --- a/test/src/features/llm/application/validate_llm_application_type.ts +++ /dev/null @@ -1,50 +0,0 @@ -import { - HttpLlm, - IHttpLlmApplication, - ILlmApplication, - ILlmSchema, - OpenApi, -} from "@samchon/openapi"; -import fs from "fs"; -import { Singleton } from "tstl"; -import typia from "typia"; - -import { TestGlobal } from "../../../TestGlobal"; - -export const test_chatgpt_application_type = (): void => { - const http: IHttpLlmApplication<"chatgpt"> = application("chatgpt"); - const classic: ILlmApplication<"chatgpt"> = http; - typia.assert(classic); -}; - -export const test_claude_application_type = (): void => { - const http: IHttpLlmApplication<"claude"> = application("claude"); - const classic: ILlmApplication<"claude"> = http; - typia.assert(classic); -}; - -export const 
test_llm_v30_application_type = (): void => { - const http: IHttpLlmApplication<"3.0"> = application("3.0"); - const classic: ILlmApplication<"3.0"> = http; - typia.assert(classic); -}; - -export const test_llm_v31_application_type = (): void => { - const http: IHttpLlmApplication<"3.1"> = application("3.1"); - const classic: ILlmApplication<"3.1"> = http; - typia.assert(classic); -}; - -const application = (model: Model) => - HttpLlm.application({ - model, - document: document.get(), - }); - -const document = new Singleton(() => - OpenApi.convert( - JSON.parse( - fs.readFileSync(`${TestGlobal.ROOT}/examples/v3.1/shopping.json`, "utf8"), - ), - ), -); diff --git a/test/src/features/llm/chatgpt/test_chatgpt_schema_additionalProperties.ts b/test/src/features/llm/chatgpt/test_chatgpt_schema_additionalProperties.ts deleted file mode 100644 index 547e9b70..00000000 --- a/test/src/features/llm/chatgpt/test_chatgpt_schema_additionalProperties.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { TestValidator } from "@nestia/e2e"; -import { ChatGptSchemaComposer } from "@samchon/openapi/lib/composers/llm/ChatGptSchemaComposer"; -import typia from "typia"; - -export const test_chatgpt_schema_additionalProperties = (): void => { - interface IMember { - name: string; - age: number; - hobby: Record; - } - const collection = typia.json.schemas<[IMember]>(); - for (const strict of [false, true]) { - const result = ChatGptSchemaComposer.schema({ - config: { - reference: false, - strict, - }, - $defs: {}, - components: collection.components, - schema: collection.schemas[0], - }); - TestValidator.equals("success")(result.success)(!strict); - } -}; diff --git a/test/src/features/llm/chatgpt/test_chatgpt_schema_optional.ts b/test/src/features/llm/chatgpt/test_chatgpt_schema_optional.ts deleted file mode 100644 index dec27f0a..00000000 --- a/test/src/features/llm/chatgpt/test_chatgpt_schema_optional.ts +++ /dev/null @@ -1,24 +0,0 @@ -import { TestValidator } from "@nestia/e2e"; -import { 
ChatGptSchemaComposer } from "@samchon/openapi/lib/composers/llm/ChatGptSchemaComposer"; -import typia from "typia"; - -export const test_chatgpt_schema_optional = (): void => { - interface IMember { - name: string; - age: number; - hobby?: string; - } - const collection = typia.json.schemas<[IMember]>(); - for (const strict of [false, true]) { - const result = ChatGptSchemaComposer.schema({ - config: { - reference: false, - strict, - }, - $defs: {}, - components: collection.components, - schema: collection.schemas[0], - }); - TestValidator.equals("success")(result.success)(!strict); - } -}; diff --git a/test/src/features/llm/function_calling/validate_llm_function_calling_additionalProperties.ts b/test/src/features/llm/function_calling/test_llm_function_calling_additionalProperties.ts similarity index 55% rename from test/src/features/llm/function_calling/validate_llm_function_calling_additionalProperties.ts rename to test/src/features/llm/function_calling/test_llm_function_calling_additionalProperties.ts index af5d99a0..072a9685 100644 --- a/test/src/features/llm/function_calling/validate_llm_function_calling_additionalProperties.ts +++ b/test/src/features/llm/function_calling/test_llm_function_calling_additionalProperties.ts @@ -1,4 +1,4 @@ -import { ILlmApplication, ILlmSchema } from "@samchon/openapi"; +import { ILlmApplication } from "@samchon/openapi"; import fs from "fs"; import typia, { tags } from "typia"; @@ -6,56 +6,19 @@ import { TestGlobal } from "../../../TestGlobal"; import { LlmApplicationFactory } from "../../../utils/LlmApplicationFactory"; import { LlmFunctionCaller } from "../../../utils/LlmFunctionCaller"; -export const test_chatgpt_function_calling_additionalProperties = () => - validate_llm_function_calling_additionalProperties({ - vendor: "openai/gpt-4.1", - model: "chatgpt", - }); - -export const test_claude_function_calling_additionalProperties = () => +export const test_llm_function_calling_additionalProperties = () => 
validate_llm_function_calling_additionalProperties({ vendor: "anthropic/claude-sonnet-4.5", - model: "claude", - }); - -export const test_deepseek_function_calling_additionalProperties = () => - validate_llm_function_calling_additionalProperties({ - vendor: "deepseek/deepseek-v3.1-terminus:exacto", - model: "claude", - }); - -export const test_gemini_function_calling_additionalProperties = () => - validate_llm_function_calling_additionalProperties({ - vendor: "google/gemini-2.5-pro", - model: "gemini", - }); - -export const test_llama_function_calling_additionalProperties = () => - validate_llm_function_calling_additionalProperties({ - vendor: "meta-llama/llama-3.3-70b-instruct", - model: "claude", }); -export const test_qwen_function_calling_additionalProperties = () => - validate_llm_function_calling_additionalProperties({ - vendor: "qwen/qwen3-next-80b-a3b-instruct", - model: "claude", - }); - -const validate_llm_function_calling_additionalProperties = async < - Model extends ILlmSchema.Model, ->(props: { +const validate_llm_function_calling_additionalProperties = async (props: { vendor: string; - model: Model; }) => { - const application: ILlmApplication = - LlmApplicationFactory.convert({ - model: props.model, - application: typia.json.application(), - }); + const application: ILlmApplication = LlmApplicationFactory.convert({ + application: typia.json.application(), + }); return await LlmFunctionCaller.test({ vendor: props.vendor, - model: props.model, function: application.functions[0], texts: [ { @@ -70,7 +33,7 @@ const validate_llm_function_calling_additionalProperties = async < handleParameters: async (parameters) => { if (process.argv.includes("--file")) await fs.promises.writeFile( - `${TestGlobal.ROOT}/examples/function-calling/schemas/chatgpt.additionalProperties.schema.json`, + `${TestGlobal.ROOT}/examples/function-calling/schemas/llm.additionalProperties.schema.json`, JSON.stringify(parameters, null, 2), "utf8", ); @@ -78,7 +41,7 @@ const 
validate_llm_function_calling_additionalProperties = async < handleCompletion: async (input) => { if (process.argv.includes("--file")) await fs.promises.writeFile( - `${TestGlobal.ROOT}/examples/function-calling/arguments/${props.model}.additionalProperties.input.json`, + `${TestGlobal.ROOT}/examples/function-calling/arguments/llm.additionalProperties.input.json`, JSON.stringify(input, null, 2), "utf8", ); diff --git a/test/src/features/llm/function_calling/validate_llm_function_calling_default.ts b/test/src/features/llm/function_calling/test_llm_function_calling_default.ts similarity index 54% rename from test/src/features/llm/function_calling/validate_llm_function_calling_default.ts rename to test/src/features/llm/function_calling/test_llm_function_calling_default.ts index d9f7bc9f..8a17c967 100644 --- a/test/src/features/llm/function_calling/validate_llm_function_calling_default.ts +++ b/test/src/features/llm/function_calling/test_llm_function_calling_default.ts @@ -1,4 +1,4 @@ -import { ILlmApplication, ILlmSchema } from "@samchon/openapi"; +import { ILlmApplication } from "@samchon/openapi"; import fs from "fs"; import typia, { tags } from "typia"; @@ -6,56 +6,19 @@ import { TestGlobal } from "../../../TestGlobal"; import { LlmApplicationFactory } from "../../../utils/LlmApplicationFactory"; import { LlmFunctionCaller } from "../../../utils/LlmFunctionCaller"; -export const test_chatgpt_function_calling_default = () => - validate_llm_function_calling_default({ - vendor: "openai/gpt-4.1", - model: "chatgpt", - }); - -export const test_claude_function_calling_default = () => +export const test_llm_function_calling_default = () => validate_llm_function_calling_default({ vendor: "anthropic/claude-sonnet-4.5", - model: "claude", - }); - -export const test_deepseek_function_calling_default = () => - validate_llm_function_calling_default({ - vendor: "deepseek/deepseek-v3.1-terminus:exacto", - model: "claude", - }); - -export const test_gemini_function_calling_default 
= () => - validate_llm_function_calling_default({ - vendor: "google/gemini-2.5-pro", - model: "gemini", - }); - -export const test_llama_function_calling_default = () => - validate_llm_function_calling_default({ - vendor: "meta-llama/llama-3.3-70b-instruct", - model: "claude", }); -export const test_qwen_function_calling_default = () => - validate_llm_function_calling_default({ - vendor: "qwen/qwen3-next-80b-a3b-instruct", - model: "claude", - }); - -const validate_llm_function_calling_default = async < - Model extends ILlmSchema.Model, ->(props: { +const validate_llm_function_calling_default = async (props: { vendor: string; - model: Model; }) => { - const application: ILlmApplication = - LlmApplicationFactory.convert({ - model: props.model, - application: typia.json.application(), - }); + const application: ILlmApplication = LlmApplicationFactory.convert({ + application: typia.json.application(), + }); return await LlmFunctionCaller.test({ vendor: props.vendor, - model: props.model, function: application.functions[0], texts: [ { @@ -70,7 +33,7 @@ const validate_llm_function_calling_default = async < handleParameters: async (parameters) => { if (process.argv.includes("--file")) await fs.promises.writeFile( - `${TestGlobal.ROOT}/examples/function-calling/schemas/${props.model}.default.schema.json`, + `${TestGlobal.ROOT}/examples/function-calling/schemas/llm.default.schema.json`, JSON.stringify(parameters, null, 2), "utf8", ); @@ -78,7 +41,7 @@ const validate_llm_function_calling_default = async < handleCompletion: async (input) => { if (process.argv.includes("--file")) await fs.promises.writeFile( - `${TestGlobal.ROOT}/examples/function-calling/arguments/${props.model}.default.input.json`, + `${TestGlobal.ROOT}/examples/function-calling/arguments/llm.default.input.json`, JSON.stringify(input, null, 2), "utf8", ); diff --git a/test/src/features/llm/function_calling/validate_llm_function_calling_example.ts 
b/test/src/features/llm/function_calling/test_llm_function_calling_example.ts similarity index 57% rename from test/src/features/llm/function_calling/validate_llm_function_calling_example.ts rename to test/src/features/llm/function_calling/test_llm_function_calling_example.ts index 3d799ba6..6d7dcdc2 100644 --- a/test/src/features/llm/function_calling/validate_llm_function_calling_example.ts +++ b/test/src/features/llm/function_calling/test_llm_function_calling_example.ts @@ -1,4 +1,4 @@ -import { ILlmApplication, ILlmSchema } from "@samchon/openapi"; +import { ILlmApplication } from "@samchon/openapi"; import fs from "fs"; import typia, { tags } from "typia"; @@ -6,56 +6,19 @@ import { TestGlobal } from "../../../TestGlobal"; import { LlmApplicationFactory } from "../../../utils/LlmApplicationFactory"; import { LlmFunctionCaller } from "../../../utils/LlmFunctionCaller"; -export const test_chatgpt_function_calling_example = () => - validate_llm_function_calling_example({ - vendor: "openai/gpt-4.1", - model: "chatgpt", - }); - -export const test_claude_function_calling_example = () => +export const test_llm_function_calling_example = () => validate_llm_function_calling_example({ vendor: "anthropic/claude-sonnet-4.5", - model: "claude", - }); - -export const test_deepseek_function_calling_example = () => - validate_llm_function_calling_example({ - vendor: "deepseek/deepseek-v3.1-terminus:exacto", - model: "claude", - }); - -export const test_gemini_function_calling_example = () => - validate_llm_function_calling_example({ - vendor: "google/gemini-2.5-pro", - model: "gemini", - }); - -export const test_llama_function_calling_example = () => - validate_llm_function_calling_example({ - vendor: "meta-llama/llama-3.3-70b-instruct", - model: "claude", }); -export const test_qwen_function_calling_example = () => - validate_llm_function_calling_example({ - vendor: "qwen/qwen3-next-80b-a3b-instruct", - model: "claude", - }); - -const validate_llm_function_calling_example = 
async < - Model extends ILlmSchema.Model, ->(props: { +const validate_llm_function_calling_example = async (props: { vendor: string; - model: Model; }) => { - const application: ILlmApplication = - LlmApplicationFactory.convert({ - model: props.model, - application: typia.json.application(), - }); + const application: ILlmApplication = LlmApplicationFactory.convert({ + application: typia.json.application(), + }); return await LlmFunctionCaller.test({ vendor: props.vendor, - model: props.model, function: application.functions[0], texts: [ { @@ -70,7 +33,7 @@ const validate_llm_function_calling_example = async < handleParameters: async (parameters) => { if (process.argv.includes("--file")) await fs.promises.writeFile( - `${TestGlobal.ROOT}/examples/function-calling/schemas/${props.model}.example.schema.json`, + `${TestGlobal.ROOT}/examples/function-calling/schemas/llm.example.schema.json`, JSON.stringify(parameters, null, 2), "utf8", ); @@ -78,7 +41,7 @@ const validate_llm_function_calling_example = async < handleCompletion: async (input) => { if (process.argv.includes("--file")) await fs.promises.writeFile( - `${TestGlobal.ROOT}/examples/function-calling/arguments/${props.model}.example.input.json`, + `${TestGlobal.ROOT}/examples/function-calling/arguments/llm.example.input.json`, JSON.stringify(input, null, 2), "utf8", ); diff --git a/test/src/features/llm/function_calling/validate_llm_function_calling_optional.ts b/test/src/features/llm/function_calling/test_llm_function_calling_optional.ts similarity index 52% rename from test/src/features/llm/function_calling/validate_llm_function_calling_optional.ts rename to test/src/features/llm/function_calling/test_llm_function_calling_optional.ts index a1fdddf1..53ae7cc9 100644 --- a/test/src/features/llm/function_calling/validate_llm_function_calling_optional.ts +++ b/test/src/features/llm/function_calling/test_llm_function_calling_optional.ts @@ -1,4 +1,4 @@ -import { ILlmApplication, ILlmSchema } from 
"@samchon/openapi"; +import { ILlmApplication } from "@samchon/openapi"; import fs from "fs"; import typia from "typia"; @@ -6,56 +6,19 @@ import { TestGlobal } from "../../../TestGlobal"; import { LlmApplicationFactory } from "../../../utils/LlmApplicationFactory"; import { LlmFunctionCaller } from "../../../utils/LlmFunctionCaller"; -export const test_chatgpt_function_calling_optional = () => - validate_chatgpt_function_calling_optional({ - vendor: "openai/gpt-4.1", - model: "chatgpt", - }); - -export const test_claude_function_calling_optional = () => - validate_chatgpt_function_calling_optional({ +export const test_llm_function_calling_optional = () => + validate_llm_function_calling_optional({ vendor: "anthropic/claude-sonnet-4.5", - model: "claude", - }); - -export const test_deepseek_function_calling_optional = () => - validate_chatgpt_function_calling_optional({ - vendor: "deepseek/deepseek-v3.1-terminus:exacto", - model: "claude", - }); - -export const test_gemini_function_calling_optional = () => - validate_chatgpt_function_calling_optional({ - vendor: "google/gemini-2.5-pro", - model: "gemini", }); -export const test_llama_function_calling_optional = () => - validate_chatgpt_function_calling_optional({ - vendor: "meta-llama/llama-3.3-70b-instruct", - model: "claude", - }); - -export const test_qwen_function_calling_optional = () => - validate_chatgpt_function_calling_optional({ - vendor: "qwen/qwen3-next-80b-a3b-instruct", - model: "claude", - }); - -const validate_chatgpt_function_calling_optional = async < - Model extends ILlmSchema.Model, ->(props: { +const validate_llm_function_calling_optional = async (props: { vendor: string; - model: Model; }) => { - const application: ILlmApplication = - LlmApplicationFactory.convert({ - model: props.model, - application: typia.json.application(), - }); + const application: ILlmApplication = LlmApplicationFactory.convert({ + application: typia.json.application(), + }); return await LlmFunctionCaller.test({ 
vendor: props.vendor, - model: props.model, function: application.functions[0], texts: [ { @@ -70,7 +33,7 @@ const validate_chatgpt_function_calling_optional = async < handleParameters: async (parameters) => { if (process.argv.includes("--file")) await fs.promises.writeFile( - `${TestGlobal.ROOT}/examples/function-calling/schemas/${props.model}.optional.schema.json`, + `${TestGlobal.ROOT}/examples/function-calling/schemas/llm.optional.schema.json`, JSON.stringify(parameters, null, 2), "utf8", ); @@ -78,7 +41,7 @@ const validate_chatgpt_function_calling_optional = async < handleCompletion: async (input) => { if (process.argv.includes("--file")) await fs.promises.writeFile( - `${TestGlobal.ROOT}/examples/function-calling/arguments/${props.model}.optional.input.json`, + `${TestGlobal.ROOT}/examples/function-calling/arguments/llm.optional.input.json`, JSON.stringify(input, null, 2), "utf8", ); diff --git a/test/src/features/llm/function_calling/test_llm_function_calling_readonly.ts b/test/src/features/llm/function_calling/test_llm_function_calling_readonly.ts new file mode 100644 index 00000000..f2d2da0e --- /dev/null +++ b/test/src/features/llm/function_calling/test_llm_function_calling_readonly.ts @@ -0,0 +1,59 @@ +import typia, { tags } from "typia"; +import { v4 } from "uuid"; + +import { ILlmApplication } from "../../../../../lib"; +import { LlmApplicationFactory } from "../../../utils/LlmApplicationFactory"; +import { LlmFunctionCaller } from "../../../utils/LlmFunctionCaller"; + +export const test_llm_function_calling_readonly = () => + validate_llm_function_calling_readonly({ + vendor: "anthropic/claude-sonnet-4.5", + }); + +const validate_llm_function_calling_readonly = async (props: { + vendor: string; +}) => { + const application: ILlmApplication = LlmApplicationFactory.convert({ + application: typia.json.application(), + }); + for (const p of [ + application.functions[0].parameters.properties.id, + application.functions[0].parameters.properties.created_at, 
+ ]) + (p as any).readOnly = true; + return await LlmFunctionCaller.test({ + vendor: props.vendor, + function: application.functions[0], + texts: [ + { + role: "assistant", + content: SYSTEM_MESSAGE, + }, + { + role: "user", + content: USER_MESSAGE, + }, + ], + handleParameters: async () => {}, + handleCompletion: async () => {}, + }); +}; + +interface IApplication { + participate(member: IMember): void; +} +interface IMember { + readonly id: string & tags.Format<"uuid">; + email: string & tags.Format<"email">; + name: string; + readonly created_at: string & tags.Format<"date-time">; +} + +const SYSTEM_MESSAGE = `You are a helpful assistant for function calling.`; +const USER_MESSAGE = ` + A new member wants to participate. + + The member's id is "${v4()}", and the account's email is "john@doe.com". + The account has been created at "2023-01-01T00:00:00.000Z" + and the member's name is "John Doe". +`; diff --git a/test/src/features/llm/function_calling/validate_llm_function_calling_recursive.ts b/test/src/features/llm/function_calling/test_llm_function_calling_recursive.ts similarity index 65% rename from test/src/features/llm/function_calling/validate_llm_function_calling_recursive.ts rename to test/src/features/llm/function_calling/test_llm_function_calling_recursive.ts index adf9cbc6..599706d3 100644 --- a/test/src/features/llm/function_calling/validate_llm_function_calling_recursive.ts +++ b/test/src/features/llm/function_calling/test_llm_function_calling_recursive.ts @@ -1,4 +1,4 @@ -import { ILlmApplication, ILlmSchema } from "@samchon/openapi"; +import { ILlmApplication } from "@samchon/openapi"; import fs from "fs"; import typia, { tags } from "typia"; @@ -6,50 +6,19 @@ import { TestGlobal } from "../../../TestGlobal"; import { LlmApplicationFactory } from "../../../utils/LlmApplicationFactory"; import { LlmFunctionCaller } from "../../../utils/LlmFunctionCaller"; -export const test_chatgpt_function_calling_recursive = () => - 
validate_llm_function_calling_recursive({ - vendor: "openai/gpt-4.1", - model: "chatgpt", - }); - -export const test_claude_function_calling_recursive = () => +export const test_llm_function_calling_recursive = () => validate_llm_function_calling_recursive({ vendor: "anthropic/claude-sonnet-4.5", - model: "claude", }); -export const test_deepseek_function_calling_recursive = () => - validate_llm_function_calling_recursive({ - vendor: "deepseek/deepseek-v3.1-terminus:exacto", - model: "claude", - }); - -export const test_gemini_function_calling_recursive = () => - validate_llm_function_calling_recursive({ - vendor: "google/gemini-2.5-pro", - model: "gemini", - }); - -export const test_llama_function_calling_recursive = () => - validate_llm_function_calling_recursive({ - vendor: "meta-llama/llama-3.3-70b-instruct", - model: "claude", - }); - -const validate_llm_function_calling_recursive = async < - Model extends ILlmSchema.Model, ->(props: { +const validate_llm_function_calling_recursive = async (props: { vendor: string; - model: Model; }) => { - const application: ILlmApplication = - LlmApplicationFactory.convert({ - model: props.model, - application: typia.json.application(), - }); + const application: ILlmApplication = LlmApplicationFactory.convert({ + application: typia.json.application(), + }); return await LlmFunctionCaller.test({ vendor: props.vendor, - model: props.model, function: application.functions[0], texts: [ { @@ -64,7 +33,7 @@ const validate_llm_function_calling_recursive = async < handleParameters: async (parameters) => { if (process.argv.includes("--file")) await fs.promises.writeFile( - `${TestGlobal.ROOT}/examples/function-calling/schemas/${props.model}.recursive.schema.json`, + `${TestGlobal.ROOT}/examples/function-calling/schemas/llm.recursive.schema.json`, JSON.stringify(parameters, null, 2), "utf8", ); @@ -72,7 +41,7 @@ const validate_llm_function_calling_recursive = async < handleCompletion: async (input) => { if 
(process.argv.includes("--file")) await fs.promises.writeFile( - `${TestGlobal.ROOT}/examples/function-calling/arguments/${props.model}.recursive.input.json`, + `${TestGlobal.ROOT}/examples/function-calling/arguments/llm.recursive.input.json`, JSON.stringify(input, null, 2), "utf8", ); diff --git a/test/src/features/llm/function_calling/test_llm_function_calling_sale.ts b/test/src/features/llm/function_calling/test_llm_function_calling_sale.ts new file mode 100644 index 00000000..0695053a --- /dev/null +++ b/test/src/features/llm/function_calling/test_llm_function_calling_sale.ts @@ -0,0 +1,42 @@ +import { ILlmApplication } from "@samchon/openapi"; +import fs from "fs"; +import typia from "typia"; + +import { TestGlobal } from "../../../TestGlobal"; +import { LlmApplicationFactory } from "../../../utils/LlmApplicationFactory"; +import { LlmFunctionCaller } from "../../../utils/LlmFunctionCaller"; +import { ShoppingSalePrompt } from "../../../utils/ShoppingSalePrompt"; + +export const test_llm_function_calling_sale = () => + validate_llm_function_calling_sale({ + vendor: "anthropic/claude-sonnet-4.5", + }); + +const validate_llm_function_calling_sale = async (props: { + vendor: string; +}) => { + const application: ILlmApplication = LlmApplicationFactory.convert({ + application: typia.json.application(), + }); + return await LlmFunctionCaller.test({ + vendor: props.vendor, + function: application.functions[0], + texts: await ShoppingSalePrompt.texts(), + handleParameters: async (parameters) => { + if (process.argv.includes("--file")) + await fs.promises.writeFile( + `${TestGlobal.ROOT}/examples/function-calling/schemas/llm.sale.schema.json`, + JSON.stringify(parameters, null, 2), + "utf8", + ); + }, + handleCompletion: async (input) => { + if (process.argv.includes("--file")) + await fs.promises.writeFile( + `${TestGlobal.ROOT}/examples/function-calling/arguments/llm.sale.input.json`, + JSON.stringify(input, null, 2), + "utf8", + ); + }, + }); +}; diff --git 
a/test/src/features/llm/function_calling/validate_llm_function_calling_tags.ts b/test/src/features/llm/function_calling/test_llm_function_calling_tags.ts similarity index 62% rename from test/src/features/llm/function_calling/validate_llm_function_calling_tags.ts rename to test/src/features/llm/function_calling/test_llm_function_calling_tags.ts index ee5d96d0..0721e127 100644 --- a/test/src/features/llm/function_calling/validate_llm_function_calling_tags.ts +++ b/test/src/features/llm/function_calling/test_llm_function_calling_tags.ts @@ -1,4 +1,4 @@ -import { ILlmApplication, ILlmSchema } from "@samchon/openapi"; +import { ILlmApplication } from "@samchon/openapi"; import fs from "fs"; import typia, { tags } from "typia"; @@ -6,44 +6,19 @@ import { TestGlobal } from "../../../TestGlobal"; import { LlmApplicationFactory } from "../../../utils/LlmApplicationFactory"; import { LlmFunctionCaller } from "../../../utils/LlmFunctionCaller"; -export const test_chatgpt_function_calling_tags = () => - validate_llm_function_calling_tags({ - vendor: "openai/gpt-4.1", - model: "chatgpt", - }); - -export const test_claude_function_calling_tags = () => +export const test_llm_function_calling_tags = () => validate_llm_function_calling_tags({ vendor: "anthropic/claude-sonnet-4.5", - model: "claude", - }); - -export const test_gemini_function_calling_tags = () => - validate_llm_function_calling_tags({ - vendor: "google/gemini-2.5-pro", - model: "gemini", }); -export const test_llama_function_calling_tags = () => - validate_llm_function_calling_tags({ - vendor: "meta-llama/llama-3.3-70b-instruct", - model: "claude", - }); - -const validate_llm_function_calling_tags = async < - Model extends ILlmSchema.Model, ->(props: { +const validate_llm_function_calling_tags = async (props: { vendor: string; - model: Model; }) => { - const application: ILlmApplication = - LlmApplicationFactory.convert({ - model: props.model, - application: typia.json.application(), - }); + const application: 
ILlmApplication = LlmApplicationFactory.convert({ + application: typia.json.application(), + }); return await LlmFunctionCaller.test({ vendor: props.vendor, - model: props.model, function: application.functions[0], texts: [ { @@ -58,7 +33,7 @@ const validate_llm_function_calling_tags = async < handleParameters: async (parameters) => { if (process.argv.includes("--file")) await fs.promises.writeFile( - `${TestGlobal.ROOT}/examples/function-calling/schemas/${props.model}.tags.schema.json`, + `${TestGlobal.ROOT}/examples/function-calling/schemas/llm.tags.schema.json`, JSON.stringify(parameters, null, 2), "utf8", ); @@ -66,7 +41,7 @@ const validate_llm_function_calling_tags = async < handleCompletion: async (input) => { if (process.argv.includes("--file")) await fs.promises.writeFile( - `${TestGlobal.ROOT}/examples/function-calling/arguments/${props.model}.tags.input.json`, + `${TestGlobal.ROOT}/examples/function-calling/arguments/llm.tags.input.json`, JSON.stringify(input, null, 2), "utf8", ); diff --git a/test/src/features/llm/function_calling/validate_llm_function_calling_union.ts b/test/src/features/llm/function_calling/test_llm_function_calling_union.ts similarity index 58% rename from test/src/features/llm/function_calling/validate_llm_function_calling_union.ts rename to test/src/features/llm/function_calling/test_llm_function_calling_union.ts index 745e6202..e0920b46 100644 --- a/test/src/features/llm/function_calling/validate_llm_function_calling_union.ts +++ b/test/src/features/llm/function_calling/test_llm_function_calling_union.ts @@ -1,4 +1,4 @@ -import { ILlmApplication, ILlmSchema } from "@samchon/openapi"; +import { ILlmApplication } from "@samchon/openapi"; import fs from "fs"; import typia from "typia"; @@ -6,56 +6,19 @@ import { TestGlobal } from "../../../TestGlobal"; import { LlmApplicationFactory } from "../../../utils/LlmApplicationFactory"; import { LlmFunctionCaller } from "../../../utils/LlmFunctionCaller"; -export const 
test_chatgpt_function_calling_union = () => - validate_llm_function_calling_union({ - vendor: "openai/gpt-4.1", - model: "chatgpt", - }); - -export const test_claude_function_calling_union = () => +export const test_llm_function_calling_union = () => validate_llm_function_calling_union({ vendor: "anthropic/claude-sonnet-4.5", - model: "claude", - }); - -export const test_deepseek_function_calling_union = () => - validate_llm_function_calling_union({ - vendor: "deepseek/deepseek-v3.1-terminus:exacto", - model: "claude", - }); - -export const test_gemini_function_calling_union = () => - validate_llm_function_calling_union({ - vendor: "google/gemini-2.5-pro", - model: "gemini", - }); - -export const test_llama_function_calling_union = () => - validate_llm_function_calling_union({ - vendor: "meta-llama/llama-3.3-70b-instruct", - model: "claude", }); -export const test_qwen_function_calling_union = () => - validate_llm_function_calling_union({ - vendor: "qwen/qwen3-next-80b-a3b-instruct", - model: "claude", - }); - -const validate_llm_function_calling_union = async < - Model extends ILlmSchema.Model, ->(props: { +const validate_llm_function_calling_union = async (props: { vendor: string; - model: Model; }) => { - const application: ILlmApplication = - LlmApplicationFactory.convert({ - model: props.model, - application: typia.json.application(), - }); + const application: ILlmApplication = LlmApplicationFactory.convert({ + application: typia.json.application(), + }); return await LlmFunctionCaller.test({ vendor: props.vendor, - model: props.model, function: application.functions[0], texts: [ { @@ -70,7 +33,7 @@ const validate_llm_function_calling_union = async < handleParameters: async (parameters) => { if (process.argv.includes("--file")) await fs.promises.writeFile( - `${TestGlobal.ROOT}/examples/function-calling/schemas/${props.model}.union.schema.json`, + `${TestGlobal.ROOT}/examples/function-calling/schemas/llm.union.schema.json`, JSON.stringify(parameters, null, 
2), "utf8", ); @@ -78,7 +41,7 @@ const validate_llm_function_calling_union = async < handleCompletion: async (input) => { if (process.argv.includes("--file")) await fs.promises.writeFile( - `${TestGlobal.ROOT}/examples/function-calling/arguments/${props.model}.union.input.json`, + `${TestGlobal.ROOT}/examples/function-calling/arguments/llm.union.input.json`, JSON.stringify(input, null, 2), "utf8", ); diff --git a/test/src/features/llm/function_calling/validate_llm_function_calling_readonly.ts b/test/src/features/llm/function_calling/validate_llm_function_calling_readonly.ts deleted file mode 100644 index ad63171a..00000000 --- a/test/src/features/llm/function_calling/validate_llm_function_calling_readonly.ts +++ /dev/null @@ -1,103 +0,0 @@ -import typia, { tags } from "typia"; -import { v4 } from "uuid"; - -import { ILlmApplication, ILlmSchema } from "../../../../../lib"; -import { LlmApplicationFactory } from "../../../utils/LlmApplicationFactory"; -import { LlmFunctionCaller } from "../../../utils/LlmFunctionCaller"; - -export const test_chatgpt_function_calling_strict_readonly = () => - validate_llm_function_calling_readonly({ - vendor: "openai/gpt-4.1", - model: "chatgpt", - config: { - reference: true, - strict: true, - }, - }); - -export const test_chatgpt_function_calling_readonly = () => - validate_llm_function_calling_readonly({ - vendor: "openai/gpt-4.1", - model: "chatgpt", - }); - -export const test_claude_function_calling_readonly = () => - validate_llm_function_calling_readonly({ - vendor: "anthropic/claude-sonnet-4.5", - model: "claude", - }); - -export const test_deepseek_function_calling_readonly = () => - validate_llm_function_calling_readonly({ - vendor: "deepseek/deepseek-v3.1-terminus:exacto", - model: "claude", - }); - -export const test_gemini_function_calling_readonly = () => - validate_llm_function_calling_readonly({ - vendor: "google/gemini-2.5-pro", - model: "gemini", - }); - -export const test_llama_function_calling_readonly = () => - 
validate_llm_function_calling_readonly({ - vendor: "meta-llama/llama-3.3-70b-instruct", - model: "claude", - }); - -const validate_llm_function_calling_readonly = async < - Model extends ILlmSchema.Model, ->(props: { - vendor: string; - model: Model; - config?: ILlmSchema.IConfig; -}) => { - const application: ILlmApplication = - LlmApplicationFactory.convert({ - model: props.model, - application: typia.json.application(), - config: props.config, - }); - for (const p of [ - application.functions[0].parameters.properties.id, - application.functions[0].parameters.properties.created_at, - ]) - (p as any).readOnly = true; - return await LlmFunctionCaller.test({ - vendor: props.vendor, - model: props.model, - function: application.functions[0], - texts: [ - { - role: "assistant", - content: SYSTEM_MESSAGE, - }, - { - role: "user", - content: USER_MESSAGE, - }, - ], - handleParameters: async () => {}, - handleCompletion: async () => {}, - strict: (props.config as any)?.strict, - }); -}; - -interface IApplication { - participate(member: IMember): void; -} -interface IMember { - readonly id: string & tags.Format<"uuid">; - email: string & tags.Format<"email">; - name: string; - readonly created_at: string & tags.Format<"date-time">; -} - -const SYSTEM_MESSAGE = `You are a helpful assistant for function calling.`; -const USER_MESSAGE = ` - A new member wants to participate. - - The member's id is "${v4()}", and the account's email is "john@doe.com". - The account has been created at "2023-01-01T00:00:00.000Z" - and the member's name is "John Doe". 
-`; diff --git a/test/src/features/llm/function_calling/validate_llm_function_calling_sale.ts b/test/src/features/llm/function_calling/validate_llm_function_calling_sale.ts deleted file mode 100644 index e8080d6c..00000000 --- a/test/src/features/llm/function_calling/validate_llm_function_calling_sale.ts +++ /dev/null @@ -1,79 +0,0 @@ -import { ILlmApplication, ILlmSchema } from "@samchon/openapi"; -import fs from "fs"; -import typia from "typia"; - -import { TestGlobal } from "../../../TestGlobal"; -import { LlmApplicationFactory } from "../../../utils/LlmApplicationFactory"; -import { LlmFunctionCaller } from "../../../utils/LlmFunctionCaller"; -import { ShoppingSalePrompt } from "../../../utils/ShoppingSalePrompt"; - -export const test_chatgpt_function_calling_sale = () => - validate_llm_function_calling_sale({ - vendor: "openai/gpt-4.1", - model: "chatgpt", - }); - -export const test_claude_function_calling_sale = () => - validate_llm_function_calling_sale({ - vendor: "anthropic/claude-sonnet-4.5", - model: "claude", - }); - -export const test_deepseek_function_calling_sale = () => - validate_llm_function_calling_sale({ - vendor: "deepseek/deepseek-v3.1-terminus:exacto", - model: "claude", - }); - -export const test_gemini_function_calling_sale = () => - validate_llm_function_calling_sale({ - vendor: "google/gemini-2.5-pro", - model: "gemini", - }); - -export const test_llama_function_calling_sale = () => - validate_llm_function_calling_sale({ - vendor: "meta-llama/llama-3.3-70b-instruct", - model: "claude", - }); - -export const test_qwen_function_calling_sale = () => - validate_llm_function_calling_sale({ - vendor: "qwen/qwen3-next-80b-a3b-instruct", - model: "claude", - }); - -const validate_llm_function_calling_sale = async < - Model extends ILlmSchema.Model, ->(props: { - vendor: string; - model: Model; -}) => { - const application: ILlmApplication = - LlmApplicationFactory.convert({ - model: props.model, - application: typia.json.application(), - }); - 
return await LlmFunctionCaller.test({ - vendor: props.vendor, - model: props.model, - function: application.functions[0], - texts: await ShoppingSalePrompt.texts(), - handleParameters: async (parameters) => { - if (process.argv.includes("--file")) - fs.promises.writeFile( - `${TestGlobal.ROOT}/examples/function-calling/schemas/${props.model}.sale.schema.json`, - JSON.stringify(parameters, null, 2), - "utf8", - ); - }, - handleCompletion: async (input) => { - if (process.argv.includes("--file")) - await fs.promises.writeFile( - `${TestGlobal.ROOT}/examples/function-calling/arguments/${props.model}.sale.input.json`, - JSON.stringify(input, null, 2), - "utf8", - ); - }, - }); -}; diff --git a/test/src/features/llm/http/test_http_llm_application.ts b/test/src/features/llm/http/test_http_llm_application.ts index 970653e8..b851bfd8 100644 --- a/test/src/features/llm/http/test_http_llm_application.ts +++ b/test/src/features/llm/http/test_http_llm_application.ts @@ -15,10 +15,8 @@ export const test_http_llm_application = async (): Promise => { await fs.promises.readFile(`${TestGlobal.ROOT}/swagger.json`, "utf8"), ), ); - const application: IHttpLlmApplication<"3.0"> = HttpLlm.application({ - model: "3.0", + const application: IHttpLlmApplication = HttpLlm.application({ document, - options: {}, }); for (const func of application.functions) { const route: IHttpMigrateRoute = func.route(); diff --git a/test/src/features/llm/http/test_http_llm_application_function_name_length.ts b/test/src/features/llm/http/test_http_llm_application_function_name_length.ts index f7455da6..0d6b2b5b 100644 --- a/test/src/features/llm/http/test_http_llm_application_function_name_length.ts +++ b/test/src/features/llm/http/test_http_llm_application_function_name_length.ts @@ -8,8 +8,7 @@ export const test_http_llm_application_function_name_length = "https://wrtnlabs.github.io/connectors/swagger/swagger.json", ).then((res) => res.json()), ); - const application: IHttpLlmApplication<"chatgpt"> = 
HttpLlm.application({ - model: "chatgpt", + const application: IHttpLlmApplication = HttpLlm.application({ document, }); diff --git a/test/src/features/llm/http/test_http_llm_application_human.ts b/test/src/features/llm/http/test_http_llm_application_human.ts index 759229ed..8e5d70e5 100644 --- a/test/src/features/llm/http/test_http_llm_application_human.ts +++ b/test/src/features/llm/http/test_http_llm_application_human.ts @@ -10,10 +10,8 @@ export const test_http_llm_application = async (): Promise => { await fs.promises.readFile(`${TestGlobal.ROOT}/swagger.json`, "utf8"), ), ); - const application: IHttpLlmApplication<"3.0"> = HttpLlm.application({ - model: "3.0", + const application: IHttpLlmApplication = HttpLlm.application({ document, - options: {}, }); const humanSwagger: OpenApi.IDocument = JSON.parse(JSON.stringify(document)); @@ -22,10 +20,8 @@ export const test_http_llm_application = async (): Promise => { .post as OpenApi.IOperation )["x-samchon-human"] = true; const humanDocument: OpenApi.IDocument = OpenApi.convert(humanSwagger as any); - const humanApplication: IHttpLlmApplication<"3.0"> = HttpLlm.application({ - model: "3.0", + const humanApplication: IHttpLlmApplication = HttpLlm.application({ document: humanDocument, - options: {}, }); TestValidator.equals("length")(application.functions.length)( diff --git a/test/src/features/llm/http/test_http_llm_fetcher_body.ts b/test/src/features/llm/http/test_http_llm_fetcher_body.ts index 3b57c217..121fb9cb 100644 --- a/test/src/features/llm/http/test_http_llm_fetcher_body.ts +++ b/test/src/features/llm/http/test_http_llm_fetcher_body.ts @@ -5,9 +5,9 @@ import { IHttpLlmApplication, IHttpLlmFunction, IHttpResponse, + LlmTypeChecker, OpenApi, } from "@samchon/openapi"; -import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; import fs from "fs"; import { TestGlobal } from "../../../TestGlobal"; @@ -20,16 +20,14 @@ export const test_http_llm_fetcher_body = async ( await 
fs.promises.readFile(`${TestGlobal.ROOT}/swagger.json`, "utf8"), ), ); - const application: IHttpLlmApplication<"3.0"> = HttpLlm.application({ - model: "3.0", + const application: IHttpLlmApplication = HttpLlm.application({ document, - options: { + config: { separate: (schema) => - LlmSchemaComposer.typeChecker("3.0").isString(schema) && - !!schema.contentMediaType, + LlmTypeChecker.isString(schema) && !!schema.contentMediaType, }, }); - const func: IHttpLlmFunction<"3.0"> | undefined = application.functions.find( + const func: IHttpLlmFunction | undefined = application.functions.find( (f) => f.path === "/{index}/{level}/{optimal}/body" && f.method === "post", ); if (func === undefined) throw new Error("Function not found"); diff --git a/test/src/features/llm/http/test_http_llm_fetcher_parameters.ts b/test/src/features/llm/http/test_http_llm_fetcher_parameters.ts index b6ffa7a4..2710d842 100644 --- a/test/src/features/llm/http/test_http_llm_fetcher_parameters.ts +++ b/test/src/features/llm/http/test_http_llm_fetcher_parameters.ts @@ -5,9 +5,9 @@ import { IHttpLlmApplication, IHttpLlmFunction, IHttpResponse, + LlmTypeChecker, OpenApi, } from "@samchon/openapi"; -import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; import fs from "fs"; import { TestGlobal } from "../../../TestGlobal"; @@ -20,16 +20,14 @@ export const test_http_llm_fetcher_parameters = async ( await fs.promises.readFile(`${TestGlobal.ROOT}/swagger.json`, "utf8"), ), ); - const application: IHttpLlmApplication<"3.0"> = HttpLlm.application({ - model: "3.0", + const application: IHttpLlmApplication = HttpLlm.application({ document, - options: { + config: { separate: (schema) => - LlmSchemaComposer.typeChecker("3.0").isString(schema) && - !!schema.contentMediaType, + LlmTypeChecker.isString(schema) && !!schema.contentMediaType, }, }); - const func: IHttpLlmFunction<"3.0"> | undefined = application.functions.find( + const func: IHttpLlmFunction | undefined = 
application.functions.find( (f) => f.path === "/{index}/{level}/{optimal}/parameters" && f.method === "get", ); diff --git a/test/src/features/llm/http/test_http_llm_fetcher_query.ts b/test/src/features/llm/http/test_http_llm_fetcher_query.ts index 0dfbbeca..a5f8d9b6 100644 --- a/test/src/features/llm/http/test_http_llm_fetcher_query.ts +++ b/test/src/features/llm/http/test_http_llm_fetcher_query.ts @@ -5,9 +5,9 @@ import { IHttpLlmApplication, IHttpLlmFunction, IHttpResponse, + LlmTypeChecker, OpenApi, } from "@samchon/openapi"; -import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; import fs from "fs"; import { TestGlobal } from "../../../TestGlobal"; @@ -20,16 +20,14 @@ export const test_http_llm_fetcher_query = async ( await fs.promises.readFile(`${TestGlobal.ROOT}/swagger.json`, "utf8"), ), ); - const application: IHttpLlmApplication<"3.0"> = HttpLlm.application({ - model: "3.0", + const application: IHttpLlmApplication = HttpLlm.application({ document: document, - options: { + config: { separate: (schema) => - LlmSchemaComposer.typeChecker("3.0").isString(schema) && - !!schema.contentMediaType, + LlmTypeChecker.isString(schema) && !!schema.contentMediaType, }, }); - const func: IHttpLlmFunction<"3.0"> | undefined = application.functions.find( + const func: IHttpLlmFunction | undefined = application.functions.find( (f) => f.path === "/{index}/{level}/{optimal}/query" && f.method === "get", ); if (func === undefined) throw new Error("Function not found"); diff --git a/test/src/features/llm/http/test_http_llm_fetcher_query_and_body.ts b/test/src/features/llm/http/test_http_llm_fetcher_query_and_body.ts index 53782746..45bcf2b1 100644 --- a/test/src/features/llm/http/test_http_llm_fetcher_query_and_body.ts +++ b/test/src/features/llm/http/test_http_llm_fetcher_query_and_body.ts @@ -5,9 +5,9 @@ import { IHttpLlmApplication, IHttpLlmFunction, IHttpResponse, + LlmTypeChecker, OpenApi, } from "@samchon/openapi"; -import { 
LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; import fs from "fs"; import { TestGlobal } from "../../../TestGlobal"; @@ -20,16 +20,14 @@ export const test_http_llm_fetcher_query_and_body = async ( await fs.promises.readFile(`${TestGlobal.ROOT}/swagger.json`, "utf8"), ), ); - const application: IHttpLlmApplication<"3.0"> = HttpLlm.application({ - model: "3.0", + const application: IHttpLlmApplication = HttpLlm.application({ document, - options: { + config: { separate: (schema) => - LlmSchemaComposer.typeChecker("3.0").isString(schema) && - !!schema.contentMediaType, + LlmTypeChecker.isString(schema) && !!schema.contentMediaType, }, }); - const func: IHttpLlmFunction<"3.0"> | undefined = application.functions.find( + const func: IHttpLlmFunction | undefined = application.functions.find( (f) => f.path === "/{index}/{level}/{optimal}/query/body" && f.method === "post", ); diff --git a/test/src/features/llm/http/test_http_llm_function_deprecated.ts b/test/src/features/llm/http/test_http_llm_function_deprecated.ts index fab26c8c..0a2db9a7 100644 --- a/test/src/features/llm/http/test_http_llm_function_deprecated.ts +++ b/test/src/features/llm/http/test_http_llm_function_deprecated.ts @@ -15,11 +15,10 @@ export const test_http_llm_function_deprecated = async (): Promise => { await fs.promises.readFile(`${TestGlobal.ROOT}/swagger.json`, "utf8"), ), ); - const application: IHttpLlmApplication<"3.0"> = HttpLlm.application({ - model: "3.0", + const application: IHttpLlmApplication = HttpLlm.application({ document, }); - const func: IHttpLlmFunction<"3.0"> | undefined = application.functions.find( + const func: IHttpLlmFunction | undefined = application.functions.find( (f) => f.method === "get" && f.path === "/nothing", ); TestValidator.equals("deprecated")(func?.deprecated)(true); diff --git a/test/src/features/llm/http/test_http_llm_function_multipart.ts b/test/src/features/llm/http/test_http_llm_function_multipart.ts index 707ab1a7..e10c18b3 
100644 --- a/test/src/features/llm/http/test_http_llm_function_multipart.ts +++ b/test/src/features/llm/http/test_http_llm_function_multipart.ts @@ -10,8 +10,7 @@ export const test_http_llm_function_multipart = async (): Promise => { await fs.promises.readFile(`${TestGlobal.ROOT}/swagger.json`, "utf8"), ), ); - const application: IHttpLlmApplication<"3.0"> = HttpLlm.application({ - model: "3.0", + const application: IHttpLlmApplication = HttpLlm.application({ document, }); TestValidator.equals("multipart not supported")( diff --git a/test/src/features/llm/http/test_http_llm_function_tags.ts b/test/src/features/llm/http/test_http_llm_function_tags.ts index 21043876..fbef8260 100644 --- a/test/src/features/llm/http/test_http_llm_function_tags.ts +++ b/test/src/features/llm/http/test_http_llm_function_tags.ts @@ -15,11 +15,10 @@ export const test_http_llm_function_deprecated = async (): Promise => { await fs.promises.readFile(`${TestGlobal.ROOT}/swagger.json`, "utf8"), ), ); - const application: IHttpLlmApplication<"3.0"> = HttpLlm.application({ - model: "3.0", + const application: IHttpLlmApplication = HttpLlm.application({ document, }); - const func: IHttpLlmFunction<"3.0"> | undefined = application.functions.find( + const func: IHttpLlmFunction | undefined = application.functions.find( (f) => f.method === "post" && f.path === "/{index}/{level}/{optimal}/body", ); TestValidator.equals("tags")(func?.tags)(["body", "post"]); diff --git a/test/src/features/llm/http/test_http_llm_merge_parameters.ts b/test/src/features/llm/http/test_http_llm_merge_parameters.ts index 8e5e19e5..576ba6d6 100644 --- a/test/src/features/llm/http/test_http_llm_merge_parameters.ts +++ b/test/src/features/llm/http/test_http_llm_merge_parameters.ts @@ -1,5 +1,5 @@ import { TestValidator } from "@nestia/e2e"; -import { HttpLlm } from "@samchon/openapi"; +import { HttpLlm, ILlmSchema } from "@samchon/openapi"; export const test_http_llm_merge_parameters = (): void => { 
TestValidator.equals("atomics")( @@ -16,7 +16,8 @@ export const test_http_llm_merge_parameters = (): void => { }, additionalProperties: false, required: ["a", "b", "c", "d"], - }, + $defs: {}, + } satisfies ILlmSchema.IParameters, separated: { human: { type: "object", @@ -26,7 +27,8 @@ export const test_http_llm_merge_parameters = (): void => { }, additionalProperties: false, required: ["a", "b"], - }, + $defs: {}, + } satisfies ILlmSchema.IParameters, llm: { type: "object", properties: { @@ -35,7 +37,8 @@ export const test_http_llm_merge_parameters = (): void => { }, additionalProperties: false, required: ["c", "d"], - }, + $defs: {}, + } satisfies ILlmSchema.IParameters, }, validate: null!, }, diff --git a/test/src/features/llm/invert/validate_llm_invert_array.ts b/test/src/features/llm/invert/test_llm_invert_array.ts similarity index 50% rename from test/src/features/llm/invert/validate_llm_invert_array.ts rename to test/src/features/llm/invert/test_llm_invert_array.ts index b1d4d4ec..5f9bb6fb 100644 --- a/test/src/features/llm/invert/validate_llm_invert_array.ts +++ b/test/src/features/llm/invert/test_llm_invert_array.ts @@ -3,37 +3,19 @@ import { ILlmSchema } from "@samchon/openapi"; import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; import typia, { IJsonSchemaCollection, tags } from "typia"; -export const test_chatgpt_invert_array = (): void => - validate_llm_invert_array("chatgpt"); - -export const test_claude_invert_array = (): void => - validate_llm_invert_array("claude"); - -export const test_gemini_invert_array = (): void => - validate_llm_invert_array("gemini"); - -export const test_llm_v30_invert_array = (): void => - validate_llm_invert_array("3.0"); - -export const test_llm_v31_invert_array = (): void => - validate_llm_invert_array("3.1"); - -const validate_llm_invert_array = ( - model: Model, -): void => { +export const test_llm_invert_array = (): void => { const collection: IJsonSchemaCollection = 
typia.json.schemas< [Array & tags.MinItems<1> & tags.MaxItems<10> & tags.UniqueItems] >(); - const $defs: Record> = {}; - const converted = LlmSchemaComposer.schema(model)({ + const $defs: Record = {}; + const converted = LlmSchemaComposer.schema({ components: collection.components, schema: collection.schemas[0], - config: LlmSchemaComposer.defaultConfig(model) as any, $defs: $defs as any, }); if (converted.success === false) throw new Error(converted.error.message); - const inverted = LlmSchemaComposer.invert(model)({ + const inverted = LlmSchemaComposer.invert({ $defs, components: collection.components, schema: converted.value, diff --git a/test/src/features/llm/invert/validate_llm_invert_enum.ts b/test/src/features/llm/invert/test_llm_invert_enum.ts similarity index 52% rename from test/src/features/llm/invert/validate_llm_invert_enum.ts rename to test/src/features/llm/invert/test_llm_invert_enum.ts index ed9a32ef..e298e551 100644 --- a/test/src/features/llm/invert/validate_llm_invert_enum.ts +++ b/test/src/features/llm/invert/test_llm_invert_enum.ts @@ -3,34 +3,16 @@ import { ILlmSchema } from "@samchon/openapi"; import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; import typia, { IJsonSchemaCollection } from "typia"; -export const test_chatgpt_invert_enum = (): void => - validate_llm_invert_enum("chatgpt"); - -export const test_claude_invert_enum = (): void => - validate_llm_invert_enum("claude"); - -export const test_gemini_invert_enum = (): void => - validate_llm_invert_enum("gemini"); - -export const test_llm_v30_invert_enum = (): void => - validate_llm_invert_enum("3.0"); - -export const test_llm_v31_invert_enum = (): void => - validate_llm_invert_enum("3.1"); - -const validate_llm_invert_enum = ( - model: Model, -): void => { +export const test_llm_invert_enum = (): void => { const validate = (collection: IJsonSchemaCollection) => { - const $defs: Record> = {}; - const converted = LlmSchemaComposer.schema(model)({ + const 
$defs: Record = {}; + const converted = LlmSchemaComposer.schema({ components: collection.components, schema: collection.schemas[0], - config: LlmSchemaComposer.defaultConfig(model) as any, $defs: $defs as any, }); if (converted.success === false) throw new Error(converted.error.message); - const inverted = LlmSchemaComposer.invert(model)({ + const inverted = LlmSchemaComposer.invert({ $defs, components: collection.components, schema: collection.schemas[0], diff --git a/test/src/features/llm/invert/validate_llm_invert_integer.ts b/test/src/features/llm/invert/test_llm_invert_integer.ts similarity index 59% rename from test/src/features/llm/invert/validate_llm_invert_integer.ts rename to test/src/features/llm/invert/test_llm_invert_integer.ts index 7c0fd4f5..d6e2c0b8 100644 --- a/test/src/features/llm/invert/validate_llm_invert_integer.ts +++ b/test/src/features/llm/invert/test_llm_invert_integer.ts @@ -3,34 +3,16 @@ import { ILlmSchema } from "@samchon/openapi"; import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; import typia, { IJsonSchemaCollection, tags } from "typia"; -export const test_chatgpt_invert_integer = (): void => - validate_llm_invert_integer("chatgpt"); - -export const test_claude_invert_integer = (): void => - validate_llm_invert_integer("claude"); - -export const test_gemini_invert_integer = (): void => - validate_llm_invert_integer("gemini"); - -export const test_llm_v30_invert_integer = (): void => - validate_llm_invert_integer("3.0"); - -export const test_llm_v31_invert_integer = (): void => - validate_llm_invert_integer("3.1"); - -const validate_llm_invert_integer = ( - model: Model, -): void => { +export const test_llm_invert_integer = (): void => { const validate = (collection: IJsonSchemaCollection) => { - const $defs: Record> = {}; - const converted = LlmSchemaComposer.schema(model)({ + const $defs: Record = {}; + const converted = LlmSchemaComposer.schema({ components: collection.components, schema: 
collection.schemas[0], - config: LlmSchemaComposer.defaultConfig(model) as any, $defs: $defs as any, }); if (converted.success === false) throw new Error(converted.error.message); - const inverted = LlmSchemaComposer.invert(model)({ + const inverted = LlmSchemaComposer.invert({ $defs, components: collection.components, schema: converted.value, diff --git a/test/src/features/llm/invert/validate_llm_invert_nullable.ts b/test/src/features/llm/invert/test_llm_invert_nullable.ts similarity index 68% rename from test/src/features/llm/invert/validate_llm_invert_nullable.ts rename to test/src/features/llm/invert/test_llm_invert_nullable.ts index 3cec1359..934b5881 100644 --- a/test/src/features/llm/invert/validate_llm_invert_nullable.ts +++ b/test/src/features/llm/invert/test_llm_invert_nullable.ts @@ -3,34 +3,16 @@ import { ILlmSchema } from "@samchon/openapi"; import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; import typia, { IJsonSchemaCollection, tags } from "typia"; -export const test_chatgpt_invert_nullable = (): void => - validate_llm_invert_nullable("chatgpt"); - -export const test_claude_invert_nullable = (): void => - validate_llm_invert_nullable("claude"); - -export const test_gemini_invert_nullable = (): void => - validate_llm_invert_nullable("gemini"); - -export const test_llm_v30_invert_nullable = (): void => - validate_llm_invert_nullable("3.0"); - -export const test_llm_v31_invert_nullable = (): void => - validate_llm_invert_nullable("3.1"); - -const validate_llm_invert_nullable = ( - model: Model, -): void => { +export const test_llm_invert_nullable = (): void => { const validate = (collection: IJsonSchemaCollection) => { - const $defs: Record> = {}; - const converted = LlmSchemaComposer.schema(model)({ + const $defs: Record = {}; + const converted = LlmSchemaComposer.schema({ components: collection.components, schema: collection.schemas[0], - config: LlmSchemaComposer.defaultConfig(model) as any, $defs: $defs as any, }); 
if (converted.success === false) throw new Error(converted.error.message); - const inverted = LlmSchemaComposer.invert(model)({ + const inverted = LlmSchemaComposer.invert({ $defs, components: collection.components, schema: converted.value, diff --git a/test/src/features/llm/invert/validate_llm_invert_number.ts b/test/src/features/llm/invert/test_llm_invert_number.ts similarity index 55% rename from test/src/features/llm/invert/validate_llm_invert_number.ts rename to test/src/features/llm/invert/test_llm_invert_number.ts index df1601d0..cd6a7506 100644 --- a/test/src/features/llm/invert/validate_llm_invert_number.ts +++ b/test/src/features/llm/invert/test_llm_invert_number.ts @@ -3,34 +3,16 @@ import { ILlmSchema } from "@samchon/openapi"; import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; import typia, { IJsonSchemaCollection, tags } from "typia"; -export const test_chatgpt_invert_number = (): void => - validate_llm_invert_number("chatgpt"); - -export const test_claude_invert_number = (): void => - validate_llm_invert_number("claude"); - -export const test_gemini_invert_number = (): void => - validate_llm_invert_number("gemini"); - -export const test_llm_v30_invert_number = (): void => - validate_llm_invert_number("3.0"); - -export const test_llm_v31_invert_number = (): void => - validate_llm_invert_number("3.1"); - -const validate_llm_invert_number = ( - model: Model, -): void => { +export const test_llm_invert_number = (): void => { const validate = (collection: IJsonSchemaCollection) => { - const $defs: Record> = {}; - const converted = LlmSchemaComposer.schema(model)({ + const $defs: Record = {}; + const converted = LlmSchemaComposer.schema({ components: collection.components, schema: collection.schemas[0], - config: LlmSchemaComposer.defaultConfig(model) as any, $defs: $defs as any, }); if (converted.success === false) throw new Error(converted.error.message); - const inverted = LlmSchemaComposer.invert(model)({ + const 
inverted = LlmSchemaComposer.invert({ $defs, components: collection.components, schema: converted.value, diff --git a/test/src/features/llm/invert/validate_llm_invert_object.ts b/test/src/features/llm/invert/test_llm_invert_object.ts similarity index 57% rename from test/src/features/llm/invert/validate_llm_invert_object.ts rename to test/src/features/llm/invert/test_llm_invert_object.ts index 3953982b..60136d69 100644 --- a/test/src/features/llm/invert/validate_llm_invert_object.ts +++ b/test/src/features/llm/invert/test_llm_invert_object.ts @@ -1,26 +1,9 @@ import { TestValidator } from "@nestia/e2e"; -import { ILlmSchema, OpenApi } from "@samchon/openapi"; +import { OpenApi } from "@samchon/openapi"; import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; import typia, { IJsonSchemaCollection, tags } from "typia"; -export const test_chatgpt_invert_object = (): void => - validate_llm_invert_object("chatgpt"); - -export const test_claude_invert_object = (): void => - validate_llm_invert_object("claude"); - -export const test_gemini_invert_object = (): void => - validate_llm_invert_object("gemini"); - -export const test_llm_v30_invert_object = (): void => - validate_llm_invert_object("3.0"); - -export const test_llm_v31_invert_object = (): void => - validate_llm_invert_object("3.1"); - -const validate_llm_invert_object = ( - model: Model, -): void => { +export const test_llm_invert_object = (): void => { const collection: IJsonSchemaCollection = typia.json.schemas< [ { @@ -38,15 +21,12 @@ const validate_llm_invert_object = ( }, ] >(); - const converted = LlmSchemaComposer.parameters(model)({ - config: { - reference: true, - } as any, + const converted = LlmSchemaComposer.parameters({ components: collection.components, schema: collection.schemas[0] as OpenApi.IJsonSchema.IReference, }); if (converted.success === false) throw new Error(converted.error.message); - const inverted = LlmSchemaComposer.invert(model)({ + const inverted = 
LlmSchemaComposer.invert({ $defs: (converted.value as any).$defs, components: collection.components, schema: converted.value, diff --git a/test/src/features/llm/invert/validate_llm_invert_oneof.ts b/test/src/features/llm/invert/test_llm_invert_oneof.ts similarity index 60% rename from test/src/features/llm/invert/validate_llm_invert_oneof.ts rename to test/src/features/llm/invert/test_llm_invert_oneof.ts index a66c5aba..78f29af7 100644 --- a/test/src/features/llm/invert/validate_llm_invert_oneof.ts +++ b/test/src/features/llm/invert/test_llm_invert_oneof.ts @@ -3,31 +3,16 @@ import { ILlmSchema } from "@samchon/openapi"; import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; import typia, { IJsonSchemaCollection } from "typia"; -export const test_chatgpt_invert_oneof = (): void => - validate_llm_invert_oneof("chatgpt"); - -export const test_claude_invert_oneof = (): void => - validate_llm_invert_oneof("claude"); - -export const test_llm_v30_invert_oneof = (): void => - validate_llm_invert_oneof("3.0"); - -export const test_llm_v31_invert_oneof = (): void => - validate_llm_invert_oneof("3.1"); - -const validate_llm_invert_oneof = ( - model: Model, -): void => { +export const test_llm_invert_oneof = (): void => { const validate = (collection: IJsonSchemaCollection) => { - const $defs: Record> = {}; - const converted = LlmSchemaComposer.schema(model)({ + const $defs: Record = {}; + const converted = LlmSchemaComposer.schema({ components: collection.components, schema: collection.schemas[0], - config: LlmSchemaComposer.defaultConfig(model) as any, $defs: $defs as any, }); if (converted.success === false) throw new Error(converted.error.message); - const inverted = LlmSchemaComposer.invert(model)({ + const inverted = LlmSchemaComposer.invert({ $defs, components: collection.components, schema: converted.value, diff --git a/test/src/features/llm/invert/validate_llm_invert_ref.ts b/test/src/features/llm/invert/test_llm_invert_ref.ts 
similarity index 58% rename from test/src/features/llm/invert/validate_llm_invert_ref.ts rename to test/src/features/llm/invert/test_llm_invert_ref.ts index 1fcdb4f8..fbf0b44e 100644 --- a/test/src/features/llm/invert/validate_llm_invert_ref.ts +++ b/test/src/features/llm/invert/test_llm_invert_ref.ts @@ -1,34 +1,28 @@ import { TestValidator } from "@nestia/e2e"; -import { ILlmSchema, OpenApi, OpenApiTypeChecker } from "@samchon/openapi"; +import { + ILlmSchema, + IOpenApiSchemaError, + IResult, + OpenApi, + OpenApiTypeChecker, +} from "@samchon/openapi"; import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; import typia, { IJsonSchemaCollection, tags } from "typia"; -export const test_chatgpt_invert_ref = (): void => - validate_llm_invert_ref("chatgpt"); - -export const test_claude_invert_ref = (): void => - validate_llm_invert_ref("claude"); - -export const test_llm_v31_invert_ref = (): void => - validate_llm_invert_ref("3.1"); - -const validate_llm_invert_ref = ( - model: Model, -): void => { +export const test_llm_invert_ref = (): void => { const collection: IJsonSchemaCollection = typia.json.schemas<[IMember]>(); - const converted = LlmSchemaComposer.parameters(model)({ - config: { - reference: true, - } as any, - components: collection.components, - schema: collection.schemas[0] as OpenApi.IJsonSchema.IReference, - }); + const converted: IResult = + LlmSchemaComposer.parameters({ + components: collection.components, + schema: collection.schemas[0] as OpenApi.IJsonSchema.IReference, + }); if (converted.success === false) throw new Error(converted.error.message); - const inverted = LlmSchemaComposer.invert(model)({ - $defs: (converted.value as any).$defs, + + const inverted: OpenApi.IJsonSchema = LlmSchemaComposer.invert({ + $defs: converted.value.$defs, components: collection.components, schema: converted.value, - } as any); + }); TestValidator.predicate("inverted")( OpenApiTypeChecker.isObject(inverted) && inverted.properties 
!== undefined && diff --git a/test/src/features/llm/invert/validate_llm_invert_string.ts b/test/src/features/llm/invert/test_llm_invert_string.ts similarity index 59% rename from test/src/features/llm/invert/validate_llm_invert_string.ts rename to test/src/features/llm/invert/test_llm_invert_string.ts index b54f9cf6..49966f95 100644 --- a/test/src/features/llm/invert/validate_llm_invert_string.ts +++ b/test/src/features/llm/invert/test_llm_invert_string.ts @@ -3,31 +3,16 @@ import { ILlmSchema } from "@samchon/openapi"; import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; import typia, { IJsonSchemaCollection, tags } from "typia"; -export const test_chatgpt_invert_string = (): void => - validate_llm_invert_string("chatgpt"); - -export const test_claude_invert_string = (): void => - validate_llm_invert_string("claude"); - -export const test_llm_v30_invert_string = (): void => - validate_llm_invert_string("3.0"); - -export const test_llm_v31_invert_string = (): void => - validate_llm_invert_string("3.1"); - -const validate_llm_invert_string = ( - model: Model, -): void => { +export const test_llm_invert_string = (): void => { const validate = (collection: IJsonSchemaCollection) => { - const $defs: Record> = {}; - const converted = LlmSchemaComposer.schema(model)({ + const $defs: Record = {}; + const converted = LlmSchemaComposer.schema({ components: collection.components, schema: collection.schemas[0], - config: LlmSchemaComposer.defaultConfig(model) as any, $defs: $defs as any, }); if (converted.success === false) throw new Error(converted.error.message); - const inverted = LlmSchemaComposer.invert(model)({ + const inverted = LlmSchemaComposer.invert({ $defs, components: collection.components, schema: converted.value, diff --git a/test/src/features/llm/schema/validate_llm_schema_mismatch.ts b/test/src/features/llm/parameters/test_llm_parameters_mismatch.ts similarity index 65% rename from 
test/src/features/llm/schema/validate_llm_schema_mismatch.ts rename to test/src/features/llm/parameters/test_llm_parameters_mismatch.ts index 74391f8b..7d8d59df 100644 --- a/test/src/features/llm/schema/validate_llm_schema_mismatch.ts +++ b/test/src/features/llm/parameters/test_llm_parameters_mismatch.ts @@ -8,24 +8,7 @@ import { import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; import typia, { IJsonSchemaCollection } from "typia"; -export const test_chatgpt_schema_mismatch = (): void => - validate_llm_schema_mismatch("chatgpt"); - -export const test_claude_schema_mismatch = (): void => - validate_llm_schema_mismatch("claude"); - -export const test_gemini_schema_mismatch = (): void => - validate_llm_schema_mismatch("gemini"); - -export const test_llm_v30_schema_mismatch = (): void => - validate_llm_schema_mismatch("3.0"); - -export const test_llm_v31_schema_mismatch = (): void => - validate_llm_schema_mismatch("3.1"); - -const validate_llm_schema_mismatch = ( - model: Model, -): void => { +export const test_llm_parameters_mismatch = (): void => { const collection: IJsonSchemaCollection = typia.json.schemas< [ { @@ -46,19 +29,15 @@ const validate_llm_schema_mismatch = ( p.third.items.properties.nested.$ref = "#/components/schemas/IRectangle1"; const result: IResult< - ILlmSchema, + ILlmSchema.IParameters, IOpenApiSchemaError - > = LlmSchemaComposer.schema(model)({ + > = LlmSchemaComposer.parameters({ accessor: "$input", - config: LlmSchemaComposer.defaultConfig( - model, - ) satisfies ILlmSchema.IConfig as any, components: collection.components, schema: typia.assert< OpenApi.IJsonSchema.IReference | OpenApi.IJsonSchema.IObject >(collection.schemas[0]), - $defs: {}, - } as any) as IResult, IOpenApiSchemaError>; + }); TestValidator.equals("success")(result.success)(false); TestValidator.equals("errors")( result.success ? 
[] : result.error.reasons.map((r) => r.accessor).sort(), diff --git a/test/src/features/llm/parameters/test_llm_parameters_reference_escaped_description_of_name.ts b/test/src/features/llm/parameters/test_llm_parameters_reference_escaped_description_of_name.ts new file mode 100644 index 00000000..0dea5636 --- /dev/null +++ b/test/src/features/llm/parameters/test_llm_parameters_reference_escaped_description_of_name.ts @@ -0,0 +1,49 @@ +import { TestValidator } from "@nestia/e2e"; +import { + ILlmSchema, + IOpenApiSchemaError, + IResult, + OpenApi, +} from "@samchon/openapi"; +import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; +import typia, { IJsonSchemaCollection } from "typia"; + +export const test_llm_parameters_reference_escaped_description_of_name = + (): void => { + const collection: IJsonSchemaCollection = + typia.json.schemas<[Something.INested.IDeep]>(); + const deep: ILlmSchema.IParameters = composeSchema(collection); + TestValidator.predicate("description")( + () => !!deep.description?.includes("Something.INested.IDeep"), + ); + }; + +interface Something { + x: number; +} +namespace Something { + export interface INested { + y: number; + } + export namespace INested { + export interface IDeep { + z: number; + } + } +} + +const composeSchema = ( + collection: IJsonSchemaCollection, +): ILlmSchema.IParameters => { + const result: IResult< + ILlmSchema.IParameters, + IOpenApiSchemaError + > = LlmSchemaComposer.parameters({ + components: collection.components, + schema: typia.assert< + OpenApi.IJsonSchema.IObject | OpenApi.IJsonSchema.IReference + >(collection.schemas[0]), + }); + if (result.success === false) throw new Error("Invalid schema"); + return result.value; +}; diff --git a/test/src/features/llm/parameters/test_llm_parameters_separate_array.ts b/test/src/features/llm/parameters/test_llm_parameters_separate_array.ts new file mode 100644 index 00000000..6c8066f6 --- /dev/null +++ 
b/test/src/features/llm/parameters/test_llm_parameters_separate_array.ts @@ -0,0 +1,84 @@ +import { TestValidator } from "@nestia/e2e"; +import { + ILlmSchema, + IOpenApiSchemaError, + IResult, + LlmTypeChecker, + OpenApi, +} from "@samchon/openapi"; +import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; +import typia, { IJsonSchemaCollection, tags } from "typia"; + +export const test_llm_parameters_separate_array = (): void => { + const separator = (schema: ILlmSchema.IParameters) => + LlmSchemaComposer.separate({ + predicate: (s) => + LlmTypeChecker.isString(s as OpenApi.IJsonSchema.IString) && + (s as OpenApi.IJsonSchema.IString).contentMediaType !== undefined, + parameters: schema, + }); + const member: ILlmSchema.IParameters = schema( + typia.json.schemas<[IManagement]>(), + ); + const upload: ILlmSchema.IParameters = schema( + typia.json.schemas<[IManagement]>(), + ); + const combined: ILlmSchema.IParameters = schema( + typia.json.schemas<[IManagement]>(), + ); + + TestValidator.equals( + "member", + (key) => key !== "description", + )(separator(member))({ + llm: member, + human: null, + }); + TestValidator.equals( + "upload", + (key) => key !== "description", + )(separator(upload))({ + llm: { + type: "object", + properties: {}, + additionalProperties: false, + required: [], + $defs: {}, + }, + human: upload, + }); + TestValidator.equals( + "combined", + (key) => key !== "description", + )(separator(combined))({ + llm: member, + human: upload, + }); +}; + +interface IManagement { + profiles: T[]; +} +interface IMember { + id: number; + name: string; +} +interface IFileUpload { + file: string & tags.ContentMediaType<"image/png">; +} +interface ICombined extends IMember, IFileUpload {} + +const schema = (collection: IJsonSchemaCollection): ILlmSchema.IParameters => { + const result: IResult = + LlmSchemaComposer.parameters({ + components: collection.components, + schema: typia.assert< + OpenApi.IJsonSchema.IObject | 
OpenApi.IJsonSchema.IReference + >(collection.schemas[0]), + }); + if (result.success === false) { + console.log(result.error); + throw new Error("Invalid schema"); + } + return result.value; +}; diff --git a/test/src/features/llm/parameters/test_llm_parameters_separate_nested.ts b/test/src/features/llm/parameters/test_llm_parameters_separate_nested.ts new file mode 100644 index 00000000..00f96051 --- /dev/null +++ b/test/src/features/llm/parameters/test_llm_parameters_separate_nested.ts @@ -0,0 +1,92 @@ +import { TestValidator } from "@nestia/e2e"; +import { + ILlmSchema, + IOpenApiSchemaError, + IResult, + LlmTypeChecker, + OpenApi, +} from "@samchon/openapi"; +import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; +import typia, { IJsonSchemaCollection, tags } from "typia"; + +export const test_llm_parameters_separate_nested = (): void => { + const separator = (schema: ILlmSchema.IParameters) => + LlmSchemaComposer.separate({ + predicate: (s) => + LlmTypeChecker.isString(s as OpenApi.IJsonSchema.IString) && + (s as OpenApi.IJsonSchema.IString).description?.includes( + "@contentMediaType", + ) === true, + parameters: schema as any, + }); + const member: ILlmSchema.IParameters = schema()( + typia.json.schemas<[INested]>(), + ); + const upload: ILlmSchema.IParameters = schema()( + typia.json.schemas<[INested]>(), + ); + const combined: ILlmSchema.IParameters = schema()( + typia.json.schemas<[INested]>(), + ); + + TestValidator.equals( + "member", + (key) => key !== "description", + )(separator(member))({ + llm: member, + human: null, + }); + TestValidator.equals( + "upload", + (key) => key !== "description", + )(separator(upload))({ + llm: { + type: "object", + properties: {}, + additionalProperties: false, + required: [], + $defs: {}, + }, + human: upload, + }); + TestValidator.equals( + "combined", + (key) => key !== "description", + )(separator(combined))({ + llm: member, + human: upload, + }); +}; + +interface INested { + first: { 
+ second: { + third: { + fourth: T; + }; + array: T[]; + }; + }; +} +interface IMember { + id: number; + name: string; +} +interface IFileUpload { + file: string & tags.Format<"uri"> & tags.ContentMediaType<"image/png">; +} +interface ICombined extends IMember, IFileUpload {} + +const schema = + () => + (collection: IJsonSchemaCollection): ILlmSchema.IParameters => { + const result: IResult = + LlmSchemaComposer.parameters({ + components: collection.components, + schema: typia.assert< + OpenApi.IJsonSchema.IObject | OpenApi.IJsonSchema.IReference + >(collection.schemas[0]), + }) as IResult; + if (result.success === false) throw new Error("Invalid schema"); + return result.value; + }; diff --git a/test/src/features/llm/parameters/test_llm_parameters_separate_object.ts b/test/src/features/llm/parameters/test_llm_parameters_separate_object.ts new file mode 100644 index 00000000..877f3e59 --- /dev/null +++ b/test/src/features/llm/parameters/test_llm_parameters_separate_object.ts @@ -0,0 +1,78 @@ +import { TestValidator } from "@nestia/e2e"; +import { + ILlmSchema, + IOpenApiSchemaError, + IResult, + LlmTypeChecker, + OpenApi, +} from "@samchon/openapi"; +import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; +import typia, { IJsonSchemaCollection, tags } from "typia"; + +export const test_llm_parameters_separate_object = (): void => { + const separator = (schema: ILlmSchema.IParameters) => + LlmSchemaComposer.separate({ + predicate: (s) => + LlmTypeChecker.isString(s as OpenApi.IJsonSchema.IString) && + (s as OpenApi.IJsonSchema.IString).contentMediaType !== undefined, + parameters: schema, + }); + const member: ILlmSchema.IParameters = schema( + typia.json.schemas<[IMember]>(), + ); + const upload: ILlmSchema.IParameters = schema( + typia.json.schemas<[IFileUpload]>(), + ); + const combined: ILlmSchema.IParameters = schema( + typia.json.schemas<[ICombined]>(), + ); + + TestValidator.equals( + "member", + (key) => key !== "description", + 
)(separator(member))({ + llm: member, + human: null, + }); + TestValidator.equals( + "upload", + (key) => key !== "description", + )(separator(upload))({ + llm: { + type: "object", + properties: {}, + additionalProperties: false, + required: [], + $defs: {}, + }, + human: upload, + }); + TestValidator.equals( + "combined", + (key) => key !== "description", + )(separator(combined))({ + llm: member, + human: upload, + }); +}; + +interface IMember { + id: number; + name: string; +} +interface IFileUpload { + file: string & tags.Format<"uri"> & tags.ContentMediaType<"image/png">; +} +interface ICombined extends IMember, IFileUpload {} + +const schema = (collection: IJsonSchemaCollection): ILlmSchema.IParameters => { + const result: IResult = + LlmSchemaComposer.parameters({ + components: collection.components, + schema: typia.assert< + OpenApi.IJsonSchema.IObject | OpenApi.IJsonSchema.IReference + >(collection.schemas[0]), + }); + if (result.success === false) throw new Error("Invalid schema"); + return result.value; +}; diff --git a/test/src/features/llm/parameters/test_llm_parameters_separate_object_additionalProperties.ts b/test/src/features/llm/parameters/test_llm_parameters_separate_object_additionalProperties.ts new file mode 100644 index 00000000..7fbb1f2c --- /dev/null +++ b/test/src/features/llm/parameters/test_llm_parameters_separate_object_additionalProperties.ts @@ -0,0 +1,94 @@ +import { TestValidator } from "@nestia/e2e"; +import { + ILlmSchema, + IOpenApiSchemaError, + IResult, + LlmTypeChecker, + OpenApi, +} from "@samchon/openapi"; +import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; +import typia, { IJsonSchemaCollection, tags } from "typia"; + +export const test_llm_parameters_separate_object_additionalProperties = + (): void => { + const separator = (schema: ILlmSchema.IParameters) => + LlmSchemaComposer.separate({ + predicate: (s) => + LlmTypeChecker.isString(s as OpenApi.IJsonSchema.IString) && + (s as 
OpenApi.IJsonSchema.IString).description?.includes( + "@contentMediaType", + ) === true, + parameters: schema as any, + }); + const params: ILlmSchema.IParameters = schema()( + typia.json.schemas<[IParameters]>(), + ); + TestValidator.equals( + "separated", + (key) => key !== "description", + )(separator(params))({ + llm: schema()( + typia.json.schemas< + [ + { + input: { + email: string; + hobbies: Record< + string, + { + id: string; + name: string; + } + >; + }; + }, + ] + >(), + ), + human: schema()( + typia.json.schemas< + [ + { + input: { + hobbies: Record< + string, + { + thumbnail: string & + tags.Format<"uri"> & + tags.ContentMediaType<"image/*">; + } + >; + }; + }, + ] + >(), + ), + }); + }; + +interface IParameters { + input: IMember; +} +interface IMember { + email: string; + hobbies: Record; +} +interface IHobby { + id: string; + name: string; + thumbnail: string & tags.Format<"uri"> & tags.ContentMediaType<"image/*">; +} + +const schema = + () => + (collection: IJsonSchemaCollection): ILlmSchema.IParameters => { + const result: IResult = + LlmSchemaComposer.parameters({ + components: collection.components, + schema: typia.assert< + OpenApi.IJsonSchema.IObject | OpenApi.IJsonSchema.IReference + >(collection.schemas[0]), + }) as IResult; + if (result.success === false) throw new Error("Invalid schema"); + return result.value; + }; diff --git a/test/src/features/llm/parameters/test_llm_parameters_separate_ref.ts b/test/src/features/llm/parameters/test_llm_parameters_separate_ref.ts new file mode 100644 index 00000000..d0a55388 --- /dev/null +++ b/test/src/features/llm/parameters/test_llm_parameters_separate_ref.ts @@ -0,0 +1,112 @@ +import { TestValidator } from "@nestia/e2e"; +import { + ILlmSchema, + IOpenApiSchemaError, + IResult, + LlmTypeChecker, + OpenApi, +} from "@samchon/openapi"; +import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; +import typia, { IJsonSchemaCollection, tags } from "typia"; + +export const 
test_llm_parameters_separate_ref = (): void => { + const separator = (schema: ILlmSchema.IParameters) => + LlmSchemaComposer.separate({ + predicate: (s) => + LlmTypeChecker.isString(s as OpenApi.IJsonSchema.IString) && + (s as OpenApi.IJsonSchema.IString).description?.includes( + "@contentMediaType", + ) === true, + parameters: schema as any, + }); + const member: ILlmSchema.IParameters = schema()( + typia.json.schemas<[IWrapper]>(), + ); + const upload: ILlmSchema.IParameters = schema()( + typia.json.schemas<[IWrapper]>(), + ); + const combined: ILlmSchema.IParameters = schema()( + typia.json.schemas<[IWrapper]>(), + ); + + TestValidator.equals( + "member", + (key) => key !== "description", + )(separator(member))({ + llm: member, + human: null, + }); + TestValidator.equals( + "upload", + (key) => key !== "description", + )(separator(upload))({ + llm: { + type: "object", + properties: {}, + additionalProperties: false, + required: [], + $defs: {}, + }, + human: upload, + }); + TestValidator.equals( + "combined", + (key) => key !== "description", + )({ + llm: separator(combined).llm + ? { ...separator(combined).llm, $defs: {} } + : null, + human: separator(combined).human + ? 
{ ...separator(combined).human, $defs: {} } + : null, + })({ + llm: { + $defs: {}, + type: "object", + properties: { + value: { + $ref: "#/$defs/ICombined.Llm", + }, + }, + required: ["value"], + additionalProperties: false, + } satisfies ILlmSchema.IParameters, + human: { + $defs: {}, + type: "object", + properties: { + value: { + $ref: "#/$defs/ICombined.Human", + }, + }, + required: ["value"], + additionalProperties: false, + } satisfies ILlmSchema.IParameters, + }); +}; + +interface IWrapper { + value: T; +} +interface IMember { + id: number; + name: string; +} +interface IFileUpload { + file: string & tags.Format<"uri"> & tags.ContentMediaType<"image/png">; +} +interface ICombined extends IMember, IFileUpload {} + +const schema = + () => + (collection: IJsonSchemaCollection): ILlmSchema.IParameters => { + const result: IResult = + LlmSchemaComposer.parameters({ + components: collection.components, + schema: typia.assert< + OpenApi.IJsonSchema.IObject | OpenApi.IJsonSchema.IReference + >(collection.schemas[0]), + }) as IResult; + if (result.success === false) throw new Error("Invalid schema"); + return result.value; + }; diff --git a/test/src/features/llm/parameters/test_llm_parameters_separate_validate.ts b/test/src/features/llm/parameters/test_llm_parameters_separate_validate.ts new file mode 100644 index 00000000..bae92af3 --- /dev/null +++ b/test/src/features/llm/parameters/test_llm_parameters_separate_validate.ts @@ -0,0 +1,43 @@ +import { TestValidator } from "@nestia/e2e"; +import { + ILlmFunction, + ILlmSchema, + IOpenApiSchemaError, + IResult, + OpenApi, + OpenApiTypeChecker, +} from "@samchon/openapi"; +import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; +import typia from "typia"; + +export const test_llm_parameters_separate_validate = (): void => { + const collection = typia.json.schemas<[ISeparatable, IHumanOnly]>(); + const validate = (schema: OpenApi.IJsonSchema, exists: boolean) => { + const result: IResult = + 
LlmSchemaComposer.parameters({ + $defs: {}, + components: collection.components, + schema: schema as OpenApi.IJsonSchema.IReference, + } as any) as IResult; + if (result.success === false) throw new Error("Failed to convert"); + + const separated: ILlmFunction.ISeparated = LlmSchemaComposer.separate({ + parameters: result.value as ILlmSchema.IParameters, + predicate: (s: OpenApi.IJsonSchema) => OpenApiTypeChecker.isNumber(s), + } as any) as ILlmFunction.ISeparated; + TestValidator.equals( + "validate", + (key) => key !== "description", + )(!!separated.validate)(exists); + }; + validate(collection.schemas[0], true); + validate(collection.schemas[1], false); +}; + +interface ISeparatable { + title: string; + value: number; +} +interface IHumanOnly { + value: number; +} diff --git a/test/src/features/llm/parameters/validate_llm_parameters_tuple.ts b/test/src/features/llm/parameters/test_llm_parameters_tuple.ts similarity index 59% rename from test/src/features/llm/parameters/validate_llm_parameters_tuple.ts rename to test/src/features/llm/parameters/test_llm_parameters_tuple.ts index 43fead9e..96b46a01 100644 --- a/test/src/features/llm/parameters/validate_llm_parameters_tuple.ts +++ b/test/src/features/llm/parameters/test_llm_parameters_tuple.ts @@ -8,24 +8,7 @@ import { import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; import typia, { IJsonSchemaCollection } from "typia"; -export const test_chatgpt_parameters_tuple = (): void => - validate_llm_parameters_tuple("chatgpt"); - -export const test_claude_parameters_tuple = (): void => - validate_llm_parameters_tuple("claude"); - -export const test_gemini_parameters_tuple = (): void => - validate_llm_parameters_tuple("gemini"); - -export const test_llm_v30_parameters_tuple = (): void => - validate_llm_parameters_tuple("3.0"); - -export const test_llm_v31_parameters_tuple = (): void => - validate_llm_parameters_tuple("3.1"); - -const validate_llm_parameters_tuple = ( - model: Model, -): 
void => { +export const test_llm_parameters_tuple = (): void => { const collection: IJsonSchemaCollection = typia.json.schemas< [ { @@ -41,18 +24,15 @@ const validate_llm_parameters_tuple = ( ] >(); const result: IResult< - ILlmSchema.IParameters, + ILlmSchema.IParameters, IOpenApiSchemaError - > = LlmSchemaComposer.parameters(model)({ + > = LlmSchemaComposer.parameters({ accessor: "$input", - config: LlmSchemaComposer.defaultConfig( - model, - ) satisfies ILlmSchema.IConfig as any, components: collection.components, schema: typia.assert< OpenApi.IJsonSchema.IReference | OpenApi.IJsonSchema.IObject >(collection.schemas[0]), - }) as IResult, IOpenApiSchemaError>; + }); TestValidator.equals("parameters")(result.success)(false); TestValidator.equals("errors")( result.success ? [] : result.error.reasons.map((r) => r.accessor).sort(), diff --git a/test/src/features/llm/parameters/validate_llm_parameters_reference_escaped_description_of_name.ts b/test/src/features/llm/parameters/validate_llm_parameters_reference_escaped_description_of_name.ts deleted file mode 100644 index 484cd984..00000000 --- a/test/src/features/llm/parameters/validate_llm_parameters_reference_escaped_description_of_name.ts +++ /dev/null @@ -1,76 +0,0 @@ -import { TestValidator } from "@nestia/e2e"; -import { - ILlmSchema, - IOpenApiSchemaError, - IResult, - OpenApi, -} from "@samchon/openapi"; -import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; -import typia, { IJsonSchemaCollection } from "typia"; - -export const test_chatgpt_parameters_reference_escaped_description_of_name = - (): void => - validate_llm_parameters_reference_escaped_description_of_name("chatgpt"); - -export const test_claude_parameters_reference_escaped_description_of_name = - (): void => - validate_llm_parameters_reference_escaped_description_of_name("claude"); - -export const test_gemini_parameters_reference_escaped_description_of_name = - (): void => - 
validate_llm_parameters_reference_escaped_description_of_name("gemini"); - -export const test_llm_v30_parameters_reference_escaped_description_of_name = - (): void => - validate_llm_parameters_reference_escaped_description_of_name("3.0"); - -export const test_llm_v31_parameters_reference_escaped_description_of_name = - (): void => - validate_llm_parameters_reference_escaped_description_of_name("3.1"); - -const validate_llm_parameters_reference_escaped_description_of_name = < - Model extends ILlmSchema.Model, ->( - model: Model, -): void => { - const collection: IJsonSchemaCollection = - typia.json.schemas<[Something.INested.IDeep]>(); - const deep: ILlmSchema.IParameters = composeSchema(model)(collection); - TestValidator.predicate("description")( - () => !!deep.description?.includes("Something.INested.IDeep"), - ); -}; - -interface Something { - x: number; -} -namespace Something { - export interface INested { - y: number; - } - export namespace INested { - export interface IDeep { - z: number; - } - } -} - -const composeSchema = - (model: Model) => - (collection: IJsonSchemaCollection): ILlmSchema.IParameters => { - const result: IResult< - ILlmSchema.IParameters, - IOpenApiSchemaError - > = LlmSchemaComposer.parameters(model)({ - components: collection.components, - schema: typia.assert< - OpenApi.IJsonSchema.IObject | OpenApi.IJsonSchema.IReference - >(collection.schemas[0]), - config: { - ...LlmSchemaComposer.defaultConfig(model), - reference: false, - } satisfies ILlmSchema.IConfig as any, - }) as IResult, IOpenApiSchemaError>; - if (result.success === false) throw new Error("Invalid schema"); - return result.value; - }; diff --git a/test/src/features/llm/parameters/validate_llm_parameters_separate_array.ts b/test/src/features/llm/parameters/validate_llm_parameters_separate_array.ts deleted file mode 100644 index 12be9f80..00000000 --- a/test/src/features/llm/parameters/validate_llm_parameters_separate_array.ts +++ /dev/null @@ -1,123 +0,0 @@ -import { 
TestValidator } from "@nestia/e2e"; -import { - ILlmSchema, - IOpenApiSchemaError, - IResult, - OpenApi, -} from "@samchon/openapi"; -import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; -import typia, { IJsonSchemaCollection, tags } from "typia"; - -export const test_chatgpt_parameters_separate_array = (): void => - validate_llm_parameters_separate_array("chatgpt", false); - -export const test_claude_parameters_separate_array = (): void => - validate_llm_parameters_separate_array("claude", true); - -export const test_gemini_parameters_separate_array = (): void => - validate_llm_parameters_separate_array("gemini", false); - -export const test_llm_v30_parameters_separate_array = (): void => { - validate_llm_parameters_separate_array("3.0", false); - validate_llm_parameters_separate_array("3.0", true); -}; - -export const test_llm_v31_parameters_separate_array = (): void => { - validate_llm_parameters_separate_array("3.1", false); - validate_llm_parameters_separate_array("3.1", true); -}; - -const validate_llm_parameters_separate_array = ( - model: Model, - constraint: boolean, -): void => { - const separator = (schema: ILlmSchema.IParameters) => - LlmSchemaComposer.separateParameters(model)({ - predicate: (s) => - LlmSchemaComposer.typeChecker(model).isString( - s as OpenApi.IJsonSchema.IString, - ) && - (constraint - ? 
(s as OpenApi.IJsonSchema.IString).contentMediaType !== undefined - : (s as OpenApi.IJsonSchema.IString).description?.includes( - "@contentMediaType", - ) === true), - parameters: schema as any, - }); - const member: ILlmSchema.IParameters = schema( - model, - constraint, - )(typia.json.schemas<[IManagement]>()); - const upload: ILlmSchema.IParameters = schema( - model, - constraint, - )(typia.json.schemas<[IManagement]>()); - const combined: ILlmSchema.IParameters = schema( - model, - constraint, - )(typia.json.schemas<[IManagement]>()); - - TestValidator.equals( - "member", - (key) => key !== "description", - )(separator(member))({ - llm: member, - human: null, - }); - TestValidator.equals( - "upload", - (key) => key !== "description", - )(separator(upload))({ - llm: { - type: "object", - properties: {}, - additionalProperties: false, - required: [], - $defs: {}, - }, - human: upload, - }); - TestValidator.equals( - "combined", - (key) => key !== "description", - )(separator(combined))({ - llm: member, - human: upload, - }); -}; - -interface IManagement { - profiles: T[]; -} -interface IMember { - id: number; - name: string; -} -interface IFileUpload { - file: string & tags.ContentMediaType<"image/png">; -} -interface ICombined extends IMember, IFileUpload {} - -const schema = - (model: Model, constraint: boolean) => - (collection: IJsonSchemaCollection): ILlmSchema.IParameters => { - const result: IResult< - ILlmSchema.IParameters, - IOpenApiSchemaError - > = LlmSchemaComposer.parameters(model)({ - components: collection.components, - schema: typia.assert< - OpenApi.IJsonSchema.IObject | OpenApi.IJsonSchema.IReference - >(collection.schemas[0]), - config: { - ...LlmSchemaComposer.defaultConfig(model), - reference: false, - constraint, - } satisfies ILlmSchema.IConfig as any, - }) as IResult, IOpenApiSchemaError>; - if (result.success === false) { - console.log(result.error); - throw new Error("Invalid schema"); - } - return result.value; - }; diff --git 
a/test/src/features/llm/parameters/validate_llm_parameters_separate_nested.ts b/test/src/features/llm/parameters/validate_llm_parameters_separate_nested.ts deleted file mode 100644 index 5933a288..00000000 --- a/test/src/features/llm/parameters/validate_llm_parameters_separate_nested.ts +++ /dev/null @@ -1,129 +0,0 @@ -import { TestValidator } from "@nestia/e2e"; -import { - ILlmSchema, - IOpenApiSchemaError, - IResult, - OpenApi, -} from "@samchon/openapi"; -import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; -import typia, { IJsonSchemaCollection, tags } from "typia"; - -export const test_chatgpt_parameters_separate_nested = (): void => - validate_llm_parameters_separate_nested("chatgpt", false); - -export const test_claude_parameters_separate_nested = (): void => - validate_llm_parameters_separate_nested("claude", true); - -export const test_gemini_parameters_separate_nested = (): void => - validate_llm_parameters_separate_nested("gemini", false); - -export const test_llm_v30_parameters_separate_nested = (): void => { - validate_llm_parameters_separate_nested("3.0", false); - validate_llm_parameters_separate_nested("3.0", true); -}; - -export const test_llm_v31_parameters_separate_nested = (): void => { - validate_llm_parameters_separate_nested("3.1", false); - validate_llm_parameters_separate_nested("3.1", true); -}; - -const validate_llm_parameters_separate_nested = < - Model extends ILlmSchema.Model, ->( - model: Model, - constraint: boolean, -): void => { - const separator = (schema: ILlmSchema.IParameters) => - LlmSchemaComposer.separateParameters(model)({ - predicate: (s) => - LlmSchemaComposer.typeChecker(model).isString( - s as OpenApi.IJsonSchema.IString, - ) && - (constraint - ? 
(s as OpenApi.IJsonSchema.IString).contentMediaType !== undefined - : (s as OpenApi.IJsonSchema.IString).description?.includes( - "@contentMediaType", - ) === true), - parameters: schema as any, - }); - const member: ILlmSchema.IParameters = schema( - model, - constraint, - )(typia.json.schemas<[INested]>()); - const upload: ILlmSchema.IParameters = schema( - model, - constraint, - )(typia.json.schemas<[INested]>()); - const combined: ILlmSchema.IParameters = schema( - model, - constraint, - )(typia.json.schemas<[INested]>()); - - TestValidator.equals( - "member", - (key) => key !== "description", - )(separator(member))({ - llm: member, - human: null, - }); - TestValidator.equals( - "upload", - (key) => key !== "description", - )(separator(upload))({ - llm: { - type: "object", - properties: {}, - additionalProperties: false, - required: [], - $defs: {}, - }, - human: upload, - }); - TestValidator.equals( - "combined", - (key) => key !== "description", - )(separator(combined))({ - llm: member, - human: upload, - }); -}; - -interface INested { - first: { - second: { - third: { - fourth: T; - }; - array: T[]; - }; - }; -} -interface IMember { - id: number; - name: string; -} -interface IFileUpload { - file: string & tags.Format<"uri"> & tags.ContentMediaType<"image/png">; -} -interface ICombined extends IMember, IFileUpload {} - -const schema = - (model: Model, constraint: boolean) => - (collection: IJsonSchemaCollection): ILlmSchema.IParameters => { - const result: IResult< - ILlmSchema.IParameters, - IOpenApiSchemaError - > = LlmSchemaComposer.parameters(model)({ - components: collection.components, - schema: typia.assert< - OpenApi.IJsonSchema.IObject | OpenApi.IJsonSchema.IReference - >(collection.schemas[0]), - config: { - ...LlmSchemaComposer.defaultConfig(model), - reference: false, - constraint, - } satisfies ILlmSchema.IConfig as any, - }) as IResult, IOpenApiSchemaError>; - if (result.success === false) throw new Error("Invalid schema"); - return 
result.value; - }; diff --git a/test/src/features/llm/parameters/validate_llm_parameters_separate_object.ts b/test/src/features/llm/parameters/validate_llm_parameters_separate_object.ts deleted file mode 100644 index 7fc65085..00000000 --- a/test/src/features/llm/parameters/validate_llm_parameters_separate_object.ts +++ /dev/null @@ -1,119 +0,0 @@ -import { TestValidator } from "@nestia/e2e"; -import { - ILlmSchema, - IOpenApiSchemaError, - IResult, - OpenApi, -} from "@samchon/openapi"; -import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; -import typia, { IJsonSchemaCollection, tags } from "typia"; - -export const test_chatgpt_parameters_separate_object = (): void => - validate_llm_parameters_separate_object("chatgpt", false); - -export const test_claude_parameters_separate_object = (): void => - validate_llm_parameters_separate_object("claude", true); - -export const test_gemini_parameters_separate_object = (): void => - validate_llm_parameters_separate_object("gemini", false); - -export const test_llm_v30_parameters_separate_object = (): void => { - validate_llm_parameters_separate_object("3.0", false); - validate_llm_parameters_separate_object("3.1", false); -}; - -export const test_llm_v31_parameters_separate_object = (): void => { - validate_llm_parameters_separate_object("3.0", true); - validate_llm_parameters_separate_object("3.1", true); -}; - -const validate_llm_parameters_separate_object = < - Model extends ILlmSchema.Model, ->( - model: Model, - constraint: boolean, -): void => { - const separator = (schema: ILlmSchema.IParameters) => - LlmSchemaComposer.separateParameters(model)({ - predicate: (s) => - LlmSchemaComposer.typeChecker(model).isString( - s as OpenApi.IJsonSchema.IString, - ) && - (constraint - ? 
(s as OpenApi.IJsonSchema.IString).contentMediaType !== undefined - : (s as OpenApi.IJsonSchema.IString).description?.includes( - "@contentMediaType", - ) === true), - parameters: schema as any, - }); - const member: ILlmSchema.IParameters = schema( - model, - constraint, - )(typia.json.schemas<[IMember]>()); - const upload: ILlmSchema.IParameters = schema( - model, - constraint, - )(typia.json.schemas<[IFileUpload]>()); - const combined: ILlmSchema.IParameters = schema( - model, - constraint, - )(typia.json.schemas<[ICombined]>()); - - TestValidator.equals( - "member", - (key) => key !== "description", - )(separator(member))({ - llm: member, - human: null, - }); - TestValidator.equals( - "upload", - (key) => key !== "description", - )(separator(upload))({ - llm: { - type: "object", - properties: {}, - additionalProperties: false, - required: [], - $defs: {}, - }, - human: upload, - }); - TestValidator.equals( - "combined", - (key) => key !== "description", - )(separator(combined))({ - llm: member, - human: upload, - }); -}; - -interface IMember { - id: number; - name: string; -} -interface IFileUpload { - file: string & tags.Format<"uri"> & tags.ContentMediaType<"image/png">; -} -interface ICombined extends IMember, IFileUpload {} - -const schema = - (model: Model, constraint: boolean) => - (collection: IJsonSchemaCollection): ILlmSchema.IParameters => { - const result: IResult< - ILlmSchema.IParameters, - IOpenApiSchemaError - > = LlmSchemaComposer.parameters(model)({ - components: collection.components, - schema: typia.assert< - OpenApi.IJsonSchema.IObject | OpenApi.IJsonSchema.IReference - >(collection.schemas[0]), - config: { - ...LlmSchemaComposer.defaultConfig(model), - reference: false, - constraint, - } satisfies ILlmSchema.IConfig as any, - }) as IResult, IOpenApiSchemaError>; - if (result.success === false) throw new Error("Invalid schema"); - return result.value; - }; diff --git 
a/test/src/features/llm/parameters/validate_llm_parameters_separate_object_additionalProperties.ts b/test/src/features/llm/parameters/validate_llm_parameters_separate_object_additionalProperties.ts deleted file mode 100644 index de44d67a..00000000 --- a/test/src/features/llm/parameters/validate_llm_parameters_separate_object_additionalProperties.ts +++ /dev/null @@ -1,150 +0,0 @@ -import { TestValidator } from "@nestia/e2e"; -import { - ILlmSchema, - IOpenApiSchemaError, - IResult, - OpenApi, -} from "@samchon/openapi"; -import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; -import typia, { IJsonSchemaCollection, tags } from "typia"; - -export const test_chatgpt_parameters_separate_object_additionalProperties = - (): void => - validate_llm_parameters_separate_object_additionalProperties( - "chatgpt", - false, - ); - -export const test_claude_parameters_separate_object_additionalProperties = - (): void => - validate_llm_parameters_separate_object_additionalProperties( - "claude", - true, - ); - -export const test_gemini_parameters_separate_object_additionalProperties = - (): void => - TestValidator.error("Geimini does not support additionalProperties")(() => - validate_llm_parameters_separate_object_additionalProperties( - "gemini", - false, - ), - ); - -export const test_llm_v30_parameters_separate_object_additionalProperties = - (): void => { - validate_llm_parameters_separate_object_additionalProperties("3.0", false); - validate_llm_parameters_separate_object_additionalProperties("3.0", true); - }; - -export const test_llm_v31_parameters_separate_object_additionalProperties = - (): void => { - validate_llm_parameters_separate_object_additionalProperties("3.1", false); - validate_llm_parameters_separate_object_additionalProperties("3.1", true); - }; - -const validate_llm_parameters_separate_object_additionalProperties = < - Model extends ILlmSchema.Model, ->( - model: Model, - constraint: boolean, -): void => { - const separator = 
(schema: ILlmSchema.IParameters) => - LlmSchemaComposer.separateParameters(model)({ - predicate: (s) => - LlmSchemaComposer.typeChecker(model).isString( - s as OpenApi.IJsonSchema.IString, - ) && - (constraint - ? (s as OpenApi.IJsonSchema.IString).contentMediaType !== undefined - : (s as OpenApi.IJsonSchema.IString).description?.includes( - "@contentMediaType", - ) === true), - parameters: schema as any, - }); - const params: ILlmSchema.IParameters = schema( - model, - constraint, - )(typia.json.schemas<[IParameters]>()); - TestValidator.equals( - model, - (key) => key !== "description", - )(separator(params))({ - llm: schema( - model, - constraint, - )( - typia.json.schemas< - [ - { - input: { - email: string; - hobbies: Record< - string, - { - id: string; - name: string; - } - >; - }; - }, - ] - >(), - ), - human: schema( - model, - constraint, - )( - typia.json.schemas< - [ - { - input: { - hobbies: Record< - string, - { - thumbnail: string & - tags.Format<"uri"> & - tags.ContentMediaType<"image/*">; - } - >; - }; - }, - ] - >(), - ), - }); -}; - -interface IParameters { - input: IMember; -} -interface IMember { - email: string; - hobbies: Record; -} -interface IHobby { - id: string; - name: string; - thumbnail: string & tags.Format<"uri"> & tags.ContentMediaType<"image/*">; -} - -const schema = - (model: Model, constraint: boolean) => - (collection: IJsonSchemaCollection): ILlmSchema.IParameters => { - const result: IResult< - ILlmSchema.IParameters, - IOpenApiSchemaError - > = LlmSchemaComposer.parameters(model)({ - components: collection.components, - schema: typia.assert< - OpenApi.IJsonSchema.IObject | OpenApi.IJsonSchema.IReference - >(collection.schemas[0]), - config: { - ...LlmSchemaComposer.defaultConfig(model), - reference: false, - constraint, - } satisfies ILlmSchema.IConfig as any, - }) as IResult, IOpenApiSchemaError>; - if (result.success === false) throw new Error("Invalid schema"); - return result.value; - }; diff --git 
a/test/src/features/llm/parameters/validate_llm_parameters_separate_ref.ts b/test/src/features/llm/parameters/validate_llm_parameters_separate_ref.ts deleted file mode 100644 index 09e06061..00000000 --- a/test/src/features/llm/parameters/validate_llm_parameters_separate_ref.ts +++ /dev/null @@ -1,142 +0,0 @@ -import { TestValidator } from "@nestia/e2e"; -import { - IChatGptSchema, - ILlmSchema, - IOpenApiSchemaError, - IResult, - OpenApi, -} from "@samchon/openapi"; -import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; -import typia, { IJsonSchemaCollection, tags } from "typia"; - -export const test_chatgpt_parameters_separate_ref = (): void => - validate_llm_parameters_separate_ref("chatgpt", false); - -export const test_claude_parameters_separate_ref = (): void => - validate_llm_parameters_separate_ref("claude", true); - -export const test_llm_v31_parameters_separate_ref = (): void => { - validate_llm_parameters_separate_ref("3.1", false); - validate_llm_parameters_separate_ref("3.1", true); -}; - -const validate_llm_parameters_separate_ref = < - Model extends Exclude, ->( - model: Model, - constraint: boolean, -): void => { - const separator = (schema: ILlmSchema.IParameters) => - LlmSchemaComposer.separateParameters(model)({ - predicate: (s) => - LlmSchemaComposer.typeChecker(model).isString( - s as OpenApi.IJsonSchema.IString, - ) && - (constraint - ? 
(s as OpenApi.IJsonSchema.IString).contentMediaType !== undefined - : (s as OpenApi.IJsonSchema.IString).description?.includes( - "@contentMediaType", - ) === true), - parameters: schema as any, - }); - const member: ILlmSchema.IParameters = schema( - model, - constraint, - )(typia.json.schemas<[IWrapper]>()); - const upload: ILlmSchema.IParameters = schema( - model, - constraint, - )(typia.json.schemas<[IWrapper]>()); - const combined: ILlmSchema.IParameters = schema( - model, - constraint, - )(typia.json.schemas<[IWrapper]>()); - - TestValidator.equals( - "member", - (key) => key !== "description", - )(separator(member))({ - llm: member, - human: null, - }); - TestValidator.equals( - "upload", - (key) => key !== "description", - )(separator(upload))({ - llm: { - type: "object", - properties: {}, - additionalProperties: false, - required: [], - $defs: {}, - }, - human: upload, - }); - TestValidator.equals( - "combined", - (key) => key !== "description", - )({ - llm: separator(combined).llm - ? { ...separator(combined).llm, $defs: {} } - : null, - human: separator(combined).human - ? 
{ ...separator(combined).human, $defs: {} } - : null, - })({ - llm: { - $defs: {}, - type: "object", - properties: { - value: { - $ref: "#/$defs/ICombined.Llm", - }, - }, - required: ["value"], - additionalProperties: false, - } satisfies IChatGptSchema.IParameters, - human: { - $defs: {}, - type: "object", - properties: { - value: { - $ref: "#/$defs/ICombined.Human", - }, - }, - required: ["value"], - additionalProperties: false, - } satisfies IChatGptSchema.IParameters, - }); -}; - -interface IWrapper { - value: T; -} -interface IMember { - id: number; - name: string; -} -interface IFileUpload { - file: string & tags.Format<"uri"> & tags.ContentMediaType<"image/png">; -} -interface ICombined extends IMember, IFileUpload {} - -const schema = - (model: Model, constraint: boolean) => - (collection: IJsonSchemaCollection): ILlmSchema.IParameters => { - const result: IResult< - ILlmSchema.IParameters, - IOpenApiSchemaError - > = LlmSchemaComposer.parameters(model)({ - components: collection.components, - schema: typia.assert< - OpenApi.IJsonSchema.IObject | OpenApi.IJsonSchema.IReference - >(collection.schemas[0]), - config: { - ...LlmSchemaComposer.defaultConfig(model), - reference: true, - constraint, - } satisfies ILlmSchema.IConfig as any, - }) as IResult, IOpenApiSchemaError>; - if (result.success === false) throw new Error("Invalid schema"); - return result.value; - }; diff --git a/test/src/features/llm/parameters/validate_llm_parameters_separate_validate.ts b/test/src/features/llm/parameters/validate_llm_parameters_separate_validate.ts deleted file mode 100644 index 715e392f..00000000 --- a/test/src/features/llm/parameters/validate_llm_parameters_separate_validate.ts +++ /dev/null @@ -1,66 +0,0 @@ -import { TestValidator } from "@nestia/e2e"; -import { - ILlmFunction, - ILlmSchema, - IOpenApiSchemaError, - IResult, - OpenApi, - OpenApiTypeChecker, -} from "@samchon/openapi"; -import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; 
-import typia from "typia"; - -export const test_chatgpt_parameters_separate_validate = (): void => - validate_llm_parameters_separate_validate("chatgpt"); - -export const test_claude_parameters_separate_validate = (): void => - validate_llm_parameters_separate_validate("claude"); - -export const test_gemini_parameters_separate_validate = (): void => - validate_llm_parameters_separate_validate("gemini"); - -export const test_llm_v30_parameters_separate_validate = (): void => - validate_llm_parameters_separate_validate("3.0"); - -export const test_llm_v31_parameters_separate_validate = (): void => - validate_llm_parameters_separate_validate("3.1"); - -const validate_llm_parameters_separate_validate = < - Model extends ILlmSchema.Model, ->( - model: Model, -): void => { - const collection = typia.json.schemas<[ISeparatable, IHumanOnly]>(); - const validate = (schema: OpenApi.IJsonSchema, exists: boolean) => { - const result: IResult< - ILlmSchema.IParameters, - IOpenApiSchemaError - > = LlmSchemaComposer.parameters(model)({ - $defs: {}, - components: collection.components, - schema: schema as OpenApi.IJsonSchema.IReference, - config: LlmSchemaComposer.defaultConfig(model), - } as any) as IResult, IOpenApiSchemaError>; - if (result.success === false) throw new Error("Failed to convert"); - - const separated: ILlmFunction.ISeparated = - LlmSchemaComposer.separateParameters(model)({ - parameters: result.value as ILlmSchema.IParameters, - predicate: (s: OpenApi.IJsonSchema) => OpenApiTypeChecker.isNumber(s), - } as any) as ILlmFunction.ISeparated; - TestValidator.equals( - "validate", - (key) => key !== "description", - )(!!separated.validate)(exists); - }; - validate(collection.schemas[0], true); - validate(collection.schemas[1], false); -}; - -interface ISeparatable { - title: string; - value: number; -} -interface IHumanOnly { - value: number; -} diff --git a/test/src/features/llm/chatgpt/test_chatgpt_schema_discriminator.ts 
b/test/src/features/llm/schema/test_llm_schema_discriminator.ts similarity index 69% rename from test/src/features/llm/chatgpt/test_chatgpt_schema_discriminator.ts rename to test/src/features/llm/schema/test_llm_schema_discriminator.ts index eec35b61..d0e7460d 100644 --- a/test/src/features/llm/chatgpt/test_chatgpt_schema_discriminator.ts +++ b/test/src/features/llm/schema/test_llm_schema_discriminator.ts @@ -1,31 +1,28 @@ import { TestValidator } from "@nestia/e2e"; import { - ChatGptTypeChecker, - IChatGptSchema, + ILlmSchema, IOpenApiSchemaError, IResult, + LlmTypeChecker, OpenApi, OpenApiTypeChecker, } from "@samchon/openapi"; -import { ChatGptSchemaComposer } from "@samchon/openapi/lib/composers/llm/ChatGptSchemaComposer"; +import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; import typia, { IJsonSchemaUnit } from "typia"; -export const test_chatgpt_schema_discriminator = (): void => { - const $defs: Record = {}; +export const test_llm_schema_discriminator = (): void => { + const $defs: Record = {}; const unit: IJsonSchemaUnit = typia.json.schema(); - const result: IResult = - ChatGptSchemaComposer.schema({ - config: { - reference: true, - }, - $defs: {}, + const result: IResult = + LlmSchemaComposer.schema({ + $defs, components: unit.components, schema: unit.schema, }); if (result.success === false) throw new Error("Failed to transform"); TestValidator.predicate("discriminator")( () => - ChatGptTypeChecker.isAnyOf(result.value) && + LlmTypeChecker.isAnyOf(result.value) && result.value["x-discriminator"] !== undefined && result.value["x-discriminator"].mapping !== undefined && Object.values(result.value["x-discriminator"].mapping).every((k) => @@ -33,7 +30,7 @@ export const test_chatgpt_schema_discriminator = (): void => { ), ); - const invert: OpenApi.IJsonSchema = ChatGptSchemaComposer.invert({ + const invert: OpenApi.IJsonSchema = LlmSchemaComposer.invert({ components: {}, $defs, schema: result.value, diff --git 
a/test/src/features/llm/schema/test_llm_schema_enum.ts b/test/src/features/llm/schema/test_llm_schema_enum.ts new file mode 100644 index 00000000..f91f30dc --- /dev/null +++ b/test/src/features/llm/schema/test_llm_schema_enum.ts @@ -0,0 +1,38 @@ +import { TestValidator } from "@nestia/e2e"; +import { + ILlmSchema, + IOpenApiSchemaError, + IResult, + OpenApi, +} from "@samchon/openapi"; +import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; +import typia, { IJsonSchemaCollection } from "typia"; + +export const test_llm_schema_enum = (): void => { + const collection: IJsonSchemaCollection = typia.json.schemas<[IBbsArticle]>(); + const result: IResult = + LlmSchemaComposer.parameters({ + components: collection.components, + schema: collection.schemas[0] as + | OpenApi.IJsonSchema.IObject + | OpenApi.IJsonSchema.IReference, + config: { + reference: false, + }, + }); + TestValidator.equals("success")(result.success)(true); + if (result.success === false) return; + + const formatted: ILlmSchema.IParameters = result.value; + const formatProp = formatted.properties.format as ILlmSchema.IString; + TestValidator.equals("enum")(formatProp.enum)(["html", "md", "txt"]); +}; + +interface IBbsArticle { + format: IBbsArticle.Format; + // title: string; + // body: string; +} +namespace IBbsArticle { + export type Format = "html" | "md" | "txt"; +} diff --git a/test/src/features/llm/schema/validate_llm_schema_enum_reference.ts b/test/src/features/llm/schema/test_llm_schema_enum_reference.ts similarity index 53% rename from test/src/features/llm/schema/validate_llm_schema_enum_reference.ts rename to test/src/features/llm/schema/test_llm_schema_enum_reference.ts index 7c774f88..3c2e315d 100644 --- a/test/src/features/llm/schema/validate_llm_schema_enum_reference.ts +++ b/test/src/features/llm/schema/test_llm_schema_enum_reference.ts @@ -7,20 +7,7 @@ import { } from "@samchon/openapi"; import { LlmSchemaComposer } from 
"@samchon/openapi/lib/composers/LlmSchemaComposer"; -export const test_chatgpt_schema_enum_reference = (): void => - validate_llm_schema_enum_reference("chatgpt"); - -export const test_gemini_schema_enum_reference = (): void => - validate_llm_schema_enum_reference("gemini"); - -export const test_llm_v30_schema_enum_reference = (): void => - validate_llm_schema_enum_reference("3.0"); - -const validate_llm_schema_enum_reference = < - Model extends "chatgpt" | "gemini" | "3.0", ->( - model: Model, -): void => { +export const test_llm_schema_enum_reference = (): void => { const components: OpenApi.IComponents = { schemas: { named: { @@ -46,18 +33,15 @@ const validate_llm_schema_enum_reference = < ], }; - const result: IResult< - ILlmSchema, - IOpenApiSchemaError - > = LlmSchemaComposer.schema(model)({ - components, - schema, - config: { - ...LlmSchemaComposer.defaultConfig(model), - reference: false, - } as any, - $defs: {}, - }) as IResult, IOpenApiSchemaError>; + const result: IResult = + LlmSchemaComposer.schema({ + components, + schema, + $defs: {}, + config: { + reference: false, + }, + }); TestValidator.equals( "success", (key) => key === "description", diff --git a/test/src/features/llm/schema/validate_llm_schema_invert.ts b/test/src/features/llm/schema/test_llm_schema_invert.ts similarity index 64% rename from test/src/features/llm/schema/validate_llm_schema_invert.ts rename to test/src/features/llm/schema/test_llm_schema_invert.ts index 896f1855..2406984d 100644 --- a/test/src/features/llm/schema/validate_llm_schema_invert.ts +++ b/test/src/features/llm/schema/test_llm_schema_invert.ts @@ -1,40 +1,23 @@ import { TestValidator } from "@nestia/e2e"; -import { ILlmSchema } from "@samchon/openapi"; +import { ILlmSchema, IOpenApiSchemaError, IResult } from "@samchon/openapi"; import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; import typia, { IJsonSchemaUnit, tags } from "typia"; -export const test_chatgpt_schema_invert = () => - 
validate_llm_schema_invert("chatgpt"); - -export const test_claude_schema_invert = () => - validate_llm_schema_invert("claude"); - -export const test_gemini_schema_invert = () => - validate_llm_schema_invert("gemini"); - -export const test_llm_v30_schema_invert = () => - validate_llm_schema_invert("3.0"); - -export const test_llm_v31_schema_invert = () => - validate_llm_schema_invert("3.1"); - -const validate_llm_schema_invert = ( - model: Model, -) => { +export const test_llm_schema_invert = (): void => { const assert = (title: string, unit: IJsonSchemaUnit): void => { - const result = LlmSchemaComposer.schema(model)({ - config: LlmSchemaComposer.defaultConfig(model) as any, - components: unit.components, - schema: unit.schema, - $defs: {}, - }); + const result: IResult = + LlmSchemaComposer.schema({ + components: unit.components, + schema: unit.schema, + $defs: {}, + }); if (result.success === false) throw new Error("Failed to compose LLM schema."); - const inverted = LlmSchemaComposer.invert(model)({ + const inverted = LlmSchemaComposer.invert({ components: {}, $defs: {}, schema: result.value, - } as any); + }); TestValidator.equals(title, (key) => key === "description")(inverted)( unit.schema, ); diff --git a/test/src/features/llm/parameters/validate_llm_parameters_mismatch.ts b/test/src/features/llm/schema/test_llm_schema_mismatch.ts similarity index 54% rename from test/src/features/llm/parameters/validate_llm_parameters_mismatch.ts rename to test/src/features/llm/schema/test_llm_schema_mismatch.ts index fe12e982..3718de77 100644 --- a/test/src/features/llm/parameters/validate_llm_parameters_mismatch.ts +++ b/test/src/features/llm/schema/test_llm_schema_mismatch.ts @@ -8,24 +8,7 @@ import { import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; import typia, { IJsonSchemaCollection } from "typia"; -export const test_chatgpt_parameters_mismatch = (): void => - validate_llm_parameters_mismatch("chatgpt"); - -export const 
test_claude_parameters_mismatch = (): void => - validate_llm_parameters_mismatch("claude"); - -export const test_gemini_parameters_mismatch = (): void => - validate_llm_parameters_mismatch("gemini"); - -export const test_llm_v30_parameters_mismatch = (): void => - validate_llm_parameters_mismatch("3.0"); - -export const test_llm_v31_parameters_mismatch = (): void => - validate_llm_parameters_mismatch("3.1"); - -const validate_llm_parameters_mismatch = ( - model: Model, -): void => { +export const test_llm_schema_mismatch = (): void => { const collection: IJsonSchemaCollection = typia.json.schemas< [ { @@ -45,19 +28,15 @@ const validate_llm_parameters_mismatch = ( p.second.properties.input.$ref = "#/components/schemas/ICircle1"; p.third.items.properties.nested.$ref = "#/components/schemas/IRectangle1"; - const result: IResult< - ILlmSchema.IParameters, - IOpenApiSchemaError - > = LlmSchemaComposer.parameters(model)({ - accessor: "$input", - config: LlmSchemaComposer.defaultConfig( - model, - ) satisfies ILlmSchema.IConfig as any, - components: collection.components, - schema: typia.assert< - OpenApi.IJsonSchema.IReference | OpenApi.IJsonSchema.IObject - >(collection.schemas[0]), - }) as IResult, IOpenApiSchemaError>; + const result: IResult = + LlmSchemaComposer.schema({ + accessor: "$input", + components: collection.components, + schema: typia.assert< + OpenApi.IJsonSchema.IReference | OpenApi.IJsonSchema.IObject + >(collection.schemas[0]), + $defs: {}, + }); TestValidator.equals("success")(result.success)(false); TestValidator.equals("errors")( result.success ? 
[] : result.error.reasons.map((r) => r.accessor).sort(), diff --git a/test/src/features/llm/schema/test_llm_schema_nullable.ts b/test/src/features/llm/schema/test_llm_schema_nullable.ts new file mode 100644 index 00000000..5dddb259 --- /dev/null +++ b/test/src/features/llm/schema/test_llm_schema_nullable.ts @@ -0,0 +1,28 @@ +import { TestValidator } from "@nestia/e2e"; +import { ILlmSchema, IOpenApiSchemaError, IResult } from "@samchon/openapi"; +import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; +import typia, { IJsonSchemaCollection } from "typia"; + +export const test_llm_schema_nullable = (): void => { + const collection: IJsonSchemaCollection = + typia.json.schemas<[number | null]>(); + const result: IResult< + ILlmSchema, + IOpenApiSchemaError + > = LlmSchemaComposer.schema({ + components: collection.components, + schema: typia.assert(collection.schemas[0]), + $defs: {}, + }); + TestValidator.equals("success")(result.success)(true); + TestValidator.equals("nullable")(result.success ? 
result.value : {})({ + anyOf: [ + { + type: "null", + }, + { + type: "number", + }, + ], + }); +}; diff --git a/test/src/features/llm/schema/test_llm_schema_oneof.ts b/test/src/features/llm/schema/test_llm_schema_oneof.ts new file mode 100644 index 00000000..7e9f9ccd --- /dev/null +++ b/test/src/features/llm/schema/test_llm_schema_oneof.ts @@ -0,0 +1,50 @@ +import { TestValidator } from "@nestia/e2e"; +import { ILlmSchema, IOpenApiSchemaError, IResult } from "@samchon/openapi"; +import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; +import typia, { IJsonSchemaCollection } from "typia"; + +export const test_llm_schema_oneof = (): void => { + const collection: IJsonSchemaCollection = + typia.json.schemas<[IPoint | ILine | ITriangle | IRectangle]>(); + + const $defs: Record = {}; + const result: IResult = + LlmSchemaComposer.schema({ + $defs, + components: collection.components, + schema: collection.schemas[0], + config: { + reference: false, + }, + }); + TestValidator.equals("success")(result.success); + TestValidator.equals("anyOf")(["point", "line", "triangle", "rectangle"])( + (result as any)?.value?.anyOf?.map( + (e: any) => e.properties?.type?.enum?.[0], + ), + ); +}; + +interface IPoint { + type: "point"; + x: number; + y: number; +} +interface ILine { + type: "line"; + p1: IPoint; + p2: IPoint; +} +interface ITriangle { + type: "triangle"; + p1: IPoint; + p2: IPoint; + p3: IPoint; +} +interface IRectangle { + type: "rectangle"; + p1: IPoint; + p2: IPoint; + p3: IPoint; + p4: IPoint; +} diff --git a/test/src/features/llm/schema/test_llm_schema_recursive_ref.ts b/test/src/features/llm/schema/test_llm_schema_recursive_ref.ts new file mode 100644 index 00000000..8f129e88 --- /dev/null +++ b/test/src/features/llm/schema/test_llm_schema_recursive_ref.ts @@ -0,0 +1,54 @@ +import { TestValidator } from "@nestia/e2e"; +import { ILlmSchema, IOpenApiSchemaError, IResult } from "@samchon/openapi"; +import { LlmSchemaComposer } from 
"@samchon/openapi/lib/composers/LlmSchemaComposer"; + +export const test_llm_schema_recursive_ref = (): void => { + const $defs: Record = {}; + const result: IResult = + LlmSchemaComposer.schema({ + $defs, + components: { + schemas: { + Department: { + type: "object", + properties: { + name: { + type: "string", + }, + children: { + type: "array", + items: { + $ref: "#/components/schemas/Department", + }, + }, + }, + required: ["name", "children"], + }, + }, + }, + schema: { + $ref: "#/components/schemas/Department", + }, + }); + TestValidator.equals("success")(result.success)(true); + TestValidator.equals("$defs")({ + Department: { + type: "object", + properties: { + name: { + type: "string", + }, + children: { + type: "array", + items: { + $ref: "#/$defs/Department", + }, + }, + }, + required: ["name", "children"], + }, + } satisfies Record as Record)($defs); + TestValidator.equals("schema")(result.success ? result.value : {})({ + $ref: "#/$defs/Department", + }); +}; diff --git a/test/src/features/llm/schema/test_llm_schema_reference_escaped_description_of_name.ts b/test/src/features/llm/schema/test_llm_schema_reference_escaped_description_of_name.ts new file mode 100644 index 00000000..71910b8c --- /dev/null +++ b/test/src/features/llm/schema/test_llm_schema_reference_escaped_description_of_name.ts @@ -0,0 +1,59 @@ +import { TestValidator } from "@nestia/e2e"; +import { + ILlmSchema, + IOpenApiSchemaError, + IResult, + OpenApi, +} from "@samchon/openapi"; +import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; +import typia, { IJsonSchemaCollection } from "typia"; + +export const test_llm_schema_reference_escaped_description_of_name = + (): void => { + const collection: IJsonSchemaCollection = typia.json.schemas< + [ + { + deep: Something.INested.IDeep; + nested: Something.INested; + something: Something; + }, + ] + >(); + const schema: ILlmSchema.IParameters = composeSchema(collection); + const deep: ILlmSchema.IObject = 
schema.properties + .deep as ILlmSchema.IObject; + TestValidator.predicate("description")( + () => !!deep.description?.includes("Something.INested.IDeep"), + ); + }; + +interface Something { + x: number; +} +namespace Something { + export interface INested { + y: number; + } + export namespace INested { + export interface IDeep { + z: number; + } + } +} + +const composeSchema = ( + collection: IJsonSchemaCollection, +): ILlmSchema.IParameters => { + const result: IResult = + LlmSchemaComposer.parameters({ + components: collection.components, + schema: typia.assert< + OpenApi.IJsonSchema.IObject | OpenApi.IJsonSchema.IReference + >(collection.schemas[0]), + config: { + reference: false, + }, + }); + if (result.success === false) throw new Error("Invalid schema"); + return result.value; +}; diff --git a/test/src/features/llm/schema/test_llm_schema_reference_escaped_description_of_namespace.ts b/test/src/features/llm/schema/test_llm_schema_reference_escaped_description_of_namespace.ts new file mode 100644 index 00000000..25e7f97d --- /dev/null +++ b/test/src/features/llm/schema/test_llm_schema_reference_escaped_description_of_namespace.ts @@ -0,0 +1,69 @@ +import { TestValidator } from "@nestia/e2e"; +import { + ILlmSchema, + IOpenApiSchemaError, + IResult, + OpenApi, +} from "@samchon/openapi"; +import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; +import typia, { IJsonSchemaCollection } from "typia"; + +export const test_llm_schema_reference_escaped_description_of_namespace = + (): void => { + const collection: IJsonSchemaCollection = typia.json.schemas< + [ + { + deep: Something.INested.IDeep; + nested: Something.INested; + something: Something; + }, + ] + >(); + const schema: ILlmSchema.IParameters = composeSchema(collection); + const deep: ILlmSchema = schema.properties.deep as ILlmSchema; + TestValidator.predicate("description")(() => { + const description: string | undefined = ( + deep as OpenApi.IJsonSchema.IObject + 
).description; + return ( + !!description && + description.includes("Something interface") && + description.includes("Something nested interface") && + description.includes("Something nested and deep interface") + ); + }); + }; + +/** Something interface. */ +interface Something { + x: number; +} +namespace Something { + /** Something nested interface. */ + export interface INested { + y: number; + } + export namespace INested { + /** Something nested and deep interface. */ + export interface IDeep { + z: number; + } + } +} + +const composeSchema = ( + collection: IJsonSchemaCollection, +): ILlmSchema.IParameters => { + const result: IResult = + LlmSchemaComposer.parameters({ + components: collection.components, + schema: typia.assert< + OpenApi.IJsonSchema.IObject | OpenApi.IJsonSchema.IReference + >(collection.schemas[0]), + config: { + reference: false, + }, + }); + if (result.success === false) throw new Error("Invalid schema"); + return result.value; +}; diff --git a/test/src/features/llm/schema/test_llm_schema_reference_escaped_description_of_property.ts b/test/src/features/llm/schema/test_llm_schema_reference_escaped_description_of_property.ts new file mode 100644 index 00000000..9233410e --- /dev/null +++ b/test/src/features/llm/schema/test_llm_schema_reference_escaped_description_of_property.ts @@ -0,0 +1,42 @@ +import { TestValidator } from "@nestia/e2e"; +import { OpenApi } from "@samchon/openapi"; +import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; +import typia, { IJsonSchemaCollection, tags } from "typia"; + +export const test_llm_schema_reference_escaped_description_of_property = + (): void => { + const collection: IJsonSchemaCollection = typia.json.schemas<[IMember]>(); + const result = LlmSchemaComposer.parameters({ + components: collection.components, + schema: collection.schemas[0]! 
as OpenApi.IJsonSchema.IReference, + config: { + strict: true, + }, + }); + if (result.success === false) + throw new Error("Failed to compose LLM schema."); + + TestValidator.equals("property description")( + result.value.properties.hobby.description, + )(undefined); + }; + +interface IMember { + id: string & tags.Format<"uuid">; + name: string; + age: number & + tags.Type<"uint32"> & + tags.Minimum<20> & + tags.ExclusiveMaximum<100>; + /** + * A hobby. + * + * The main hobby. + */ + hobby: IHobby; +} + +/** The hobby type. */ +interface IHobby { + name: string; +} diff --git a/test/src/features/llm/schema/test_llm_schema_separate_object_empty.ts b/test/src/features/llm/schema/test_llm_schema_separate_object_empty.ts new file mode 100644 index 00000000..b9fc7974 --- /dev/null +++ b/test/src/features/llm/schema/test_llm_schema_separate_object_empty.ts @@ -0,0 +1,35 @@ +import { TestValidator } from "@nestia/e2e"; +import { + ILlmSchema, + IOpenApiSchemaError, + IResult, + OpenApi, + OpenApiTypeChecker, +} from "@samchon/openapi"; +import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; +import typia, { IJsonSchemaCollection } from "typia"; + +export const test_llm_schema_separate_object_empty = (): void => { + TestValidator.equals("separated")( + LlmSchemaComposer.separate({ + predicate: (schema: OpenApi.IJsonSchema) => + OpenApiTypeChecker.isInteger(schema), + parameters: schema(typia.json.schemas<[{}]>()), + }), + )({ + llm: schema(typia.json.schemas<[{}]>()), + human: null, + }); +}; + +const schema = (collection: IJsonSchemaCollection): ILlmSchema.IParameters => { + const result: IResult = + LlmSchemaComposer.parameters({ + components: collection.components, + schema: typia.assert< + OpenApi.IJsonSchema.IObject | OpenApi.IJsonSchema.IReference + >(collection.schemas[0]), + }); + if (result.success === false) throw new Error("Invalid schema"); + return result.value; +}; diff --git 
a/test/src/features/llm/schema/test_llm_schema_separate_string.ts b/test/src/features/llm/schema/test_llm_schema_separate_string.ts new file mode 100644 index 00000000..bcaf8980 --- /dev/null +++ b/test/src/features/llm/schema/test_llm_schema_separate_string.ts @@ -0,0 +1,63 @@ +import { TestValidator } from "@nestia/e2e"; +import { + ILlmSchema, + IOpenApiSchemaError, + IResult, + LlmTypeChecker, + OpenApi, +} from "@samchon/openapi"; +import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; +import typia, { IJsonSchemaCollection, tags } from "typia"; + +export const test_llm_schema_separate_string = (): void => { + const separator = (schema: ILlmSchema.IParameters) => + LlmSchemaComposer.separate({ + predicate: (s) => + LlmTypeChecker.isString(s) && s.contentMediaType !== undefined, + parameters: schema, + }); + const plain: ILlmSchema.IParameters = schema( + typia.json.schemas< + [ + { + name: string; + }, + ] + >(), + ); + const upload: ILlmSchema.IParameters = schema( + typia.json.schemas< + [ + { + file: string & tags.ContentMediaType<"image/*">; + }, + ] + >(), + ); + TestValidator.equals("plain")(separator(plain))({ + llm: plain, + human: null, + }); + TestValidator.equals("upload")(separator(upload))({ + llm: { + type: "object", + properties: {}, + additionalProperties: false, + required: [], + $defs: {}, + }, + human: upload, + }); +}; + +const schema = (collection: IJsonSchemaCollection): ILlmSchema.IParameters => { + const result: IResult = + LlmSchemaComposer.parameters({ + components: collection.components, + schema: typia.assert< + OpenApi.IJsonSchema.IObject | OpenApi.IJsonSchema.IReference + >(collection.schemas[0]), + }); + if (result.success === false) throw new Error("Invalid schema"); + return result.value; +}; diff --git a/test/src/features/llm/chatgpt/test_chatgpt_schema_strict.ts b/test/src/features/llm/schema/test_llm_schema_strict_additionalProperties.ts similarity index 85% rename from 
test/src/features/llm/chatgpt/test_chatgpt_schema_strict.ts rename to test/src/features/llm/schema/test_llm_schema_strict_additionalProperties.ts index 000eb0b7..0f7f387a 100644 --- a/test/src/features/llm/chatgpt/test_chatgpt_schema_strict.ts +++ b/test/src/features/llm/schema/test_llm_schema_strict_additionalProperties.ts @@ -3,7 +3,7 @@ import { ILlmSchema, IOpenApiSchemaError, IResult } from "@samchon/openapi"; import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; import typia, { IJsonSchemaCollection } from "typia"; -export const test_chatgpt_schema_strict = (): void => { +export const test_llm_schema_strict_additionalProperties = (): void => { const collection: IJsonSchemaCollection = typia.json.schemas< [ { @@ -17,15 +17,14 @@ export const test_chatgpt_schema_strict = (): void => { ] >(); const res: IResult = - LlmSchemaComposer.schema("chatgpt")({ - config: { - ...LlmSchemaComposer.defaultConfig("chatgpt"), - strict: true, - }, + LlmSchemaComposer.schema({ components: collection.components, schema: collection.schemas[0], $defs: {}, - } as any); + config: { + strict: true, + }, + }); TestValidator.equals("strict")({ type: "object", additionalProperties: false, diff --git a/test/src/features/llm/chatgpt/test_chatgpt_schema_reference_description.ts b/test/src/features/llm/schema/test_llm_schema_strict_description.ts similarity index 69% rename from test/src/features/llm/chatgpt/test_chatgpt_schema_reference_description.ts rename to test/src/features/llm/schema/test_llm_schema_strict_description.ts index b0528c67..c9c25b19 100644 --- a/test/src/features/llm/chatgpt/test_chatgpt_schema_reference_description.ts +++ b/test/src/features/llm/schema/test_llm_schema_strict_description.ts @@ -3,15 +3,14 @@ import { OpenApi } from "@samchon/openapi"; import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; import typia, { IJsonSchemaCollection, tags } from "typia"; -export const 
test_chatgpt_schema_reference_description = () => { +export const test_llm_schema_strict_description = () => { const collection: IJsonSchemaCollection = typia.json.schemas<[IMember]>(); - const result = LlmSchemaComposer.parameters("chatgpt")({ + const result = LlmSchemaComposer.parameters({ + components: collection.components, + schema: collection.schemas[0]! as OpenApi.IJsonSchema.IReference, config: { - reference: true, strict: true, }, - components: collection.components, - schema: collection.schemas[0]! as OpenApi.IJsonSchema.IReference, }); TestValidator.predicate("type description")( result.success === true && @@ -24,19 +23,6 @@ export const test_chatgpt_schema_reference_description = () => { result.success === true && result.value.properties.hobby.description === undefined, ); - - const nonStrict = LlmSchemaComposer.parameters("chatgpt")({ - config: { - reference: true, - strict: false, - }, - components: collection.components, - schema: collection.schemas[0]! as OpenApi.IJsonSchema.IReference, - }); - TestValidator.equals("non-strict $ref description")( - nonStrict.success === true && - nonStrict.value.properties.hobby.description === "The hobby type.", - ); }; interface IMember { diff --git a/test/src/features/llm/schema/validate_llm_schema_tuple.ts b/test/src/features/llm/schema/test_llm_schema_tuple.ts similarity index 53% rename from test/src/features/llm/schema/validate_llm_schema_tuple.ts rename to test/src/features/llm/schema/test_llm_schema_tuple.ts index 3bf113bd..1ff5586d 100644 --- a/test/src/features/llm/schema/validate_llm_schema_tuple.ts +++ b/test/src/features/llm/schema/test_llm_schema_tuple.ts @@ -8,24 +8,7 @@ import { import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; import typia, { IJsonSchemaCollection } from "typia"; -export const test_chatgpt_schema_tuple = (): void => - validate_llm_schema_tuple("chatgpt"); - -export const test_claude_schema_tuple = (): void => - 
validate_llm_schema_tuple("claude"); - -export const test_gemini_schema_tuple = (): void => - validate_llm_schema_tuple("gemini"); - -export const test_llm_v30_schema_tuple = (): void => - validate_llm_schema_tuple("3.0"); - -export const test_llm_v31_schema_tuple = (): void => - validate_llm_schema_tuple("3.1"); - -const validate_llm_schema_tuple = ( - model: Model, -): void => { +export const test_llm_schema_tuple = (): void => { const collection: IJsonSchemaCollection = typia.json.schemas< [ [string, number], @@ -44,7 +27,7 @@ const validate_llm_schema_tuple = ( }>, ] >(); - const v = validate(model)(collection.components); + const v = validate(collection.components); v(collection.schemas[0])(["$input"]); v(collection.schemas[1])([ `$input.properties["input"]`, @@ -56,22 +39,16 @@ const validate_llm_schema_tuple = ( }; const validate = - (model: Model) => (components: OpenApi.IComponents) => (schema: OpenApi.IJsonSchema) => (expected: string[]): void => { - const result: IResult< - ILlmSchema.IParameters, - IOpenApiSchemaError - > = LlmSchemaComposer.schema(model)({ - config: LlmSchemaComposer.defaultConfig( - model, - ) satisfies ILlmSchema.IConfig as any, - accessor: "$input", - components, - schema, - $defs: {}, - } as any) as IResult, IOpenApiSchemaError>; + const result: IResult = + LlmSchemaComposer.schema({ + accessor: "$input", + components, + schema, + $defs: {}, + }); TestValidator.equals("success")(result.success)(false); TestValidator.equals("errors")( result.success ? 
[] : result.error.reasons.map((r) => r.accessor).sort(), diff --git a/test/src/features/llm/schema/test_llm_type_checker_cover_any.ts b/test/src/features/llm/schema/test_llm_type_checker_cover_any.ts new file mode 100644 index 00000000..7c4cc749 --- /dev/null +++ b/test/src/features/llm/schema/test_llm_type_checker_cover_any.ts @@ -0,0 +1,34 @@ +import { TestValidator } from "@nestia/e2e"; +import { ILlmSchema, LlmTypeChecker, OpenApi } from "@samchon/openapi"; +import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; +import typia, { IJsonSchemaCollection } from "typia"; + +export const test_llm_type_checker_cover_any = () => { + const collection: IJsonSchemaCollection = typia.json.schemas<[IBasic]>(); + const result = LlmSchemaComposer.parameters({ + components: collection.components, + schema: collection.schemas[0] as OpenApi.IJsonSchema.IReference, + }); + if (result.success === false) + throw new Error(`Failed to compose parameters.`); + + const parameters = result.value; + const check = (x: ILlmSchema, y: ILlmSchema): boolean => + LlmTypeChecker.covers({ + x, + y, + $defs: parameters.$defs, + }); + TestValidator.equals("any covers (string | null)")(true)( + check(parameters.properties.any, parameters.properties.string_or_null), + ); + TestValidator.equals("any covers (string | undefined)")(true)( + check(parameters.properties.any, parameters.properties.string_or_undefined), + ); +}; + +interface IBasic { + any: any; + string_or_null: null | string; + string_or_undefined: string | undefined; +} diff --git a/test/src/features/llm/schema/test_llm_type_checker_cover_array.ts b/test/src/features/llm/schema/test_llm_type_checker_cover_array.ts new file mode 100644 index 00000000..213d1cc0 --- /dev/null +++ b/test/src/features/llm/schema/test_llm_type_checker_cover_array.ts @@ -0,0 +1,173 @@ +import { TestValidator } from "@nestia/e2e"; +import { LlmTypeChecker, OpenApi } from "@samchon/openapi"; +import { LlmSchemaComposer } from 
"@samchon/openapi/lib/composers/LlmSchemaComposer"; +import typia, { IJsonSchemaCollection } from "typia"; + +export const test_llm_type_checker_cover_array = () => { + const collection: IJsonSchemaCollection = + typia.json.schemas<[Plan2D, Plan3D, Box2D, Box3D]>(); + const components: OpenApi.IComponents = collection.components as any; + const plan2D: OpenApi.IJsonSchema = components.schemas!.Plan2D; + const plan3D: OpenApi.IJsonSchema = components.schemas!.Plan3D; + const box2D: OpenApi.IJsonSchema = components.schemas!.Box2D; + const box3D: OpenApi.IJsonSchema = components.schemas!.Box3D; + + const $defs = {}; + const check = (x: OpenApi.IJsonSchema, y: OpenApi.IJsonSchema): boolean => { + const [a, b] = [x, y].map((schema) => { + const result = LlmSchemaComposer.schema({ + components: collection.components, + schema: schema, + $defs, + }); + if (result.success === false) + throw new Error(`Failed to compose schema.`); + return result.value; + }); + return LlmTypeChecker.covers({ + x: a, + y: b, + $defs, + }); + }; + + TestValidator.equals("Plan3D[] covers Plan2D[]")(true)( + check({ type: "array", items: plan3D }, { type: "array", items: plan2D }), + ); + TestValidator.equals("Box3D[] covers Box2D[]")(true)( + check({ type: "array", items: box3D }, { type: "array", items: box2D }), + ); + TestValidator.equals("Array covers Array")(true)( + check( + { + type: "array", + items: { + oneOf: [plan3D, box3D], + }, + }, + { + type: "array", + items: { + oneOf: [plan2D, box2D], + }, + }, + ), + ); + TestValidator.equals("(Plan3D|Box3D)[] covers (Plan2D|Box2D)[]")(true)( + check( + { + oneOf: [ + { type: "array", items: plan3D }, + { type: "array", items: box3D }, + ], + }, + { + oneOf: [ + { type: "array", items: plan2D }, + { type: "array", items: box2D }, + ], + }, + ), + ); + + TestValidator.equals("Plan2D[] can't cover Plan3D[]")(false)( + check({ type: "array", items: plan2D }, { type: "array", items: plan3D }), + ); + TestValidator.equals("Box2D[] can't cover 
Box3D[]")(false)( + check({ type: "array", items: box2D }, { type: "array", items: box3D }), + ); + TestValidator.equals("Array can't cover Array")( + false, + )( + check( + { + type: "array", + items: { + oneOf: [plan2D, box2D], + }, + }, + { + type: "array", + items: { + oneOf: [plan3D, box3D], + }, + }, + ), + ); + TestValidator.equals("(Plan2D[]|Box2D[]) can't cover (Plan3D[]|Box3D[])")( + false, + )( + check( + { + oneOf: [ + { type: "array", items: plan2D }, + { type: "array", items: box2D }, + ], + }, + { + oneOf: [ + { type: "array", items: plan3D }, + { type: "array", items: box3D }, + ], + }, + ), + ); + TestValidator.equals("Plan3D[] can't cover (Plan2D|Box2D)[]")(false)( + check( + { type: "array", items: plan3D }, + { + oneOf: [ + { type: "array", items: plan2D }, + { type: "array", items: box2D }, + ], + }, + ), + ); + TestValidator.equals("Box3D[] can't cover Array")(false)( + check( + { type: "array", items: box3D }, + { + type: "array", + items: { + oneOf: [plan2D, box2D], + }, + }, + ), + ); +}; + +type Plan2D = { + center: Point2D; + size: Point2D; + geometries: Geometry2D[]; +}; +type Plan3D = { + center: Point3D; + size: Point3D; + geometries: Geometry3D[]; +}; +type Geometry3D = { + position: Point3D; + scale: Point3D; +}; +type Geometry2D = { + position: Point2D; + scale: Point2D; +}; +type Point2D = { + x: number; + y: number; +}; +type Point3D = { + x: number; + y: number; + z: number; +}; +type Box2D = { + size: Point2D; + nested: Point2D[]; +}; +type Box3D = { + size: Point3D; + nested: Point3D[]; +}; diff --git a/test/src/features/llm/schema/validate_llm_schema_discriminator.ts b/test/src/features/llm/schema/validate_llm_schema_discriminator.ts deleted file mode 100644 index 8d017ae6..00000000 --- a/test/src/features/llm/schema/validate_llm_schema_discriminator.ts +++ /dev/null @@ -1,68 +0,0 @@ -import { TestValidator } from "@nestia/e2e"; -import { - ILlmSchemaV3_1, - IOpenApiSchemaError, - IResult, - LlmTypeCheckerV3_1, - OpenApi, - 
OpenApiTypeChecker, -} from "@samchon/openapi"; -import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; -import typia, { IJsonSchemaUnit } from "typia"; - -export const test_claude_schema_discriminator = (): void => - validate_llm_schema_discriminator("claude"); - -export const test_llama_v31_schema_discriminator = (): void => - validate_llm_schema_discriminator("3.1"); - -const validate_llm_schema_discriminator = (vendor: "claude" | "3.1"): void => { - const $defs: Record = {}; - const unit: IJsonSchemaUnit = typia.json.schema(); - const result: IResult = - LlmSchemaComposer.schema(vendor)({ - config: { - reference: true, - constraint: true, - }, - $defs, - components: unit.components, - schema: unit.schema, - }); - if (result.success === false) throw new Error("Failed to transform"); - TestValidator.predicate("discriminator")( - () => - LlmTypeCheckerV3_1.isOneOf(result.value) && - result.value.discriminator !== undefined && - result.value.discriminator.mapping !== undefined && - Object.values(result.value.discriminator.mapping).every((k) => - k.startsWith("#/$defs/"), - ), - ); - - const invert: OpenApi.IJsonSchema = LlmSchemaComposer.invert(vendor)({ - components: {}, - $defs, - schema: result.value, - }); - TestValidator.predicate("invert")( - () => - OpenApiTypeChecker.isOneOf(invert) && - invert.discriminator !== undefined && - invert.discriminator.mapping !== undefined && - Object.values(invert.discriminator.mapping).every((k) => - k.startsWith("#/components/schemas/"), - ), - ); -}; - -interface ICat { - type: "cat"; - name: string; - ribbon: boolean; -} -interface IAnt { - type: "ant"; - name: string; - role: "queen" | "soldier" | "worker"; -} diff --git a/test/src/features/llm/schema/validate_llm_schema_enum.ts b/test/src/features/llm/schema/validate_llm_schema_enum.ts deleted file mode 100644 index f8dc5c42..00000000 --- a/test/src/features/llm/schema/validate_llm_schema_enum.ts +++ /dev/null @@ -1,52 +0,0 @@ -import { 
TestValidator } from "@nestia/e2e"; -import { - IGeminiSchema, - ILlmSchema, - IOpenApiSchemaError, - IResult, -} from "@samchon/openapi"; -import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; -import typia, { IJsonSchemaCollection } from "typia"; - -export const test_chatgpt_schema_enum = (): void => - validate_llm_schema_enum("chatgpt"); - -export const test_gemini_schema_enum = (): void => - validate_llm_schema_enum("gemini"); - -export const test_llm_v30_schema_enum = (): void => - validate_llm_schema_enum("3.0"); - -const validate_llm_schema_enum = ( - model: Model, -): void => { - const collection: IJsonSchemaCollection = typia.json.schemas<[IBbsArticle]>(); - const result: IResult< - ILlmSchema, - IOpenApiSchemaError - > = LlmSchemaComposer.schema(model)({ - components: collection.components, - schema: collection.schemas[0], - config: { - ...LlmSchemaComposer.defaultConfig(model), - reference: false, - } as any, - $defs: {}, - }) as IResult, IOpenApiSchemaError>; - TestValidator.equals("success")(result.success); - TestValidator.equals("enum")( - typia.assert( - typia.assert(result.success ? 
result.value : {}) - .properties.format, - ).enum, - )(["html", "md", "txt"]); -}; - -interface IBbsArticle { - format: IBbsArticle.Format; - // title: string; - // body: string; -} -namespace IBbsArticle { - export type Format = "html" | "md" | "txt"; -} diff --git a/test/src/features/llm/schema/validate_llm_schema_nullable.ts b/test/src/features/llm/schema/validate_llm_schema_nullable.ts deleted file mode 100644 index 11674aa0..00000000 --- a/test/src/features/llm/schema/validate_llm_schema_nullable.ts +++ /dev/null @@ -1,54 +0,0 @@ -import { TestValidator } from "@nestia/e2e"; -import { ILlmSchema, IOpenApiSchemaError, IResult } from "@samchon/openapi"; -import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; -import typia, { IJsonSchemaCollection } from "typia"; - -export const test_chatgpt_schema_nullable = (): void => - validate_llm_schema_nullable("chatgpt", "anyOf"); - -export const test_claude_schema_nullable = (): void => - validate_llm_schema_nullable("claude", "oneOf"); - -export const test_gemini_schema_nullable = (): void => - validate_llm_schema_nullable("gemini", "anyOf"); - -export const test_llm_v30_schema_nullable = (): void => - validate_llm_schema_nullable("3.0", "nullable"); - -export const test_llm_v31_schema_nullable = (): void => - validate_llm_schema_nullable("3.1", "oneOf"); - -const validate_llm_schema_nullable = ( - model: Model, - expected: "nullable" | "oneOf" | "anyOf", -): void => { - const collection: IJsonSchemaCollection = - typia.json.schemas<[number | null]>(); - const result: IResult< - ILlmSchema, - IOpenApiSchemaError - > = LlmSchemaComposer.schema(model)({ - config: LlmSchemaComposer.defaultConfig(model) as any, - components: collection.components, - schema: typia.assert(collection.schemas[0]), - $defs: {}, - } as any) as IResult, IOpenApiSchemaError>; - TestValidator.equals("success")(result.success)(true); - TestValidator.equals("nullable")(result.success ? 
result.value : {})( - expected === "nullable" - ? ({ - type: "number", - nullable: true, - } as any) - : ({ - [expected]: [ - { - type: "null", - }, - { - type: "number", - }, - ], - } as any), - ); -}; diff --git a/test/src/features/llm/schema/validate_llm_schema_oneof.ts b/test/src/features/llm/schema/validate_llm_schema_oneof.ts deleted file mode 100644 index 3201f899..00000000 --- a/test/src/features/llm/schema/validate_llm_schema_oneof.ts +++ /dev/null @@ -1,69 +0,0 @@ -import { TestValidator } from "@nestia/e2e"; -import { ILlmSchema, IOpenApiSchemaError, IResult } from "@samchon/openapi"; -import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; -import typia, { IJsonSchemaCollection } from "typia"; - -export const test_chatgpt_schema_anyof = (): void => - validate_llm_schema_oneof("chatgpt", "anyOf", false); - -export const test_claude_schema_oneof = (): void => - validate_llm_schema_oneof("claude", "oneOf", true); - -export const test_llm_v30_schema_oneof = (): void => - validate_llm_schema_oneof("3.0", "oneOf", false); - -export const test_llm_v31_schema_oneof = (): void => - validate_llm_schema_oneof("3.1", "oneOf", true); - -const validate_llm_schema_oneof = ( - model: Model, - field: "oneOf" | "anyOf", - constant: boolean, -): void => { - const collection: IJsonSchemaCollection = - typia.json.schemas<[IPoint | ILine | ITriangle | IRectangle]>(); - - const $defs: Record> = {}; - const result: IResult< - ILlmSchema, - IOpenApiSchemaError - > = LlmSchemaComposer.schema(model)({ - $defs: $defs as any, - components: collection.components, - schema: collection.schemas[0], - config: { - ...LlmSchemaComposer.defaultConfig(model), - reference: false, - } as any, - }) as IResult, IOpenApiSchemaError>; - TestValidator.equals("success")(result.success); - TestValidator.equals(field)(["point", "line", "triangle", "rectangle"])( - (result as any)?.value?.[field]?.map((e: any) => - constant ? 
e.properties?.type?.const : e.properties?.type?.enum?.[0], - ), - ); -}; - -interface IPoint { - type: "point"; - x: number; - y: number; -} -interface ILine { - type: "line"; - p1: IPoint; - p2: IPoint; -} -interface ITriangle { - type: "triangle"; - p1: IPoint; - p2: IPoint; - p3: IPoint; -} -interface IRectangle { - type: "rectangle"; - p1: IPoint; - p2: IPoint; - p3: IPoint; - p4: IPoint; -} diff --git a/test/src/features/llm/schema/validate_llm_schema_recursive_ref.ts b/test/src/features/llm/schema/validate_llm_schema_recursive_ref.ts deleted file mode 100644 index 2c3a0613..00000000 --- a/test/src/features/llm/schema/validate_llm_schema_recursive_ref.ts +++ /dev/null @@ -1,73 +0,0 @@ -import { TestValidator } from "@nestia/e2e"; -import { ILlmSchema, IOpenApiSchemaError, IResult } from "@samchon/openapi"; -import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; - -export const test_chatgpt_schema_recursive_ref = (): void => - validate_llm_schema_recursive_ref("chatgpt"); - -export const test_claude_schema_recursive_ref = (): void => - validate_llm_schema_recursive_ref("claude"); - -export const test_llm_v31_schema_recursive_ref = (): void => - validate_llm_schema_recursive_ref("3.1"); - -const validate_llm_schema_recursive_ref = < - Model extends Exclude, ->( - model: Model, -): void => { - const $defs: Record> = {}; - const result: IResult< - ILlmSchema, - IOpenApiSchemaError - > = LlmSchemaComposer.schema(model)({ - $defs: $defs as any, - components: { - schemas: { - Department: { - type: "object", - properties: { - name: { - type: "string", - }, - children: { - type: "array", - items: { - $ref: "#/components/schemas/Department", - }, - }, - }, - required: ["name", "children"], - }, - }, - }, - schema: { - $ref: "#/components/schemas/Department", - }, - config: { - ...(LlmSchemaComposer.defaultConfig(model) as any), - reference: false, - }, - }) as IResult, IOpenApiSchemaError>; - 
TestValidator.equals("success")(result.success)(true); - TestValidator.equals("$defs")({ - Department: { - type: "object", - properties: { - name: { - type: "string", - }, - children: { - type: "array", - items: { - $ref: "#/$defs/Department", - }, - }, - }, - required: ["name", "children"], - }, - })($defs as any); - TestValidator.equals("schema")(result.success ? result.value : {})({ - $ref: "#/$defs/Department", - }); -}; diff --git a/test/src/features/llm/schema/validate_llm_schema_reference_escaped_description_of_name.ts b/test/src/features/llm/schema/validate_llm_schema_reference_escaped_description_of_name.ts deleted file mode 100644 index 7ced29fa..00000000 --- a/test/src/features/llm/schema/validate_llm_schema_reference_escaped_description_of_name.ts +++ /dev/null @@ -1,86 +0,0 @@ -import { TestValidator } from "@nestia/e2e"; -import { - ILlmSchema, - IOpenApiSchemaError, - IResult, - OpenApi, -} from "@samchon/openapi"; -import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; -import typia, { IJsonSchemaCollection } from "typia"; - -export const test_chatgpt_schema_reference_escaped_description_of_name = - (): void => - validate_llm_schema_reference_escaped_description_of_name("chatgpt"); - -export const test_claude_schema_reference_escaped_description_of_name = - (): void => - validate_llm_schema_reference_escaped_description_of_name("claude"); - -export const test_gemini_schema_reference_escaped_description_of_name = - (): void => - validate_llm_schema_reference_escaped_description_of_name("gemini"); - -export const test_llm_v30_schema_reference_escaped_description_of_name = - (): void => validate_llm_schema_reference_escaped_description_of_name("3.0"); - -export const test_llm_v31_schema_reference_escaped_description_of_name = - (): void => validate_llm_schema_reference_escaped_description_of_name("3.1"); - -const validate_llm_schema_reference_escaped_description_of_name = < - Model extends ILlmSchema.Model, ->( - model: 
Model, -): void => { - const collection: IJsonSchemaCollection = typia.json.schemas< - [ - { - deep: Something.INested.IDeep; - nested: Something.INested; - something: Something; - }, - ] - >(); - const schema: ILlmSchema.IParameters = - composeSchema(model)(collection); - const deep: ILlmSchema = schema.properties.deep as ILlmSchema; - TestValidator.predicate("description")( - () => - !!(deep as OpenApi.IJsonSchema.IObject).description?.includes( - "Something.INested.IDeep", - ), - ); -}; - -interface Something { - x: number; -} -namespace Something { - export interface INested { - y: number; - } - export namespace INested { - export interface IDeep { - z: number; - } - } -} - -const composeSchema = - (model: Model) => - (collection: IJsonSchemaCollection): ILlmSchema.IParameters => { - const result: IResult< - ILlmSchema.IParameters, - IOpenApiSchemaError - > = LlmSchemaComposer.parameters(model)({ - components: collection.components, - schema: typia.assert< - OpenApi.IJsonSchema.IObject | OpenApi.IJsonSchema.IReference - >(collection.schemas[0]), - config: { - ...LlmSchemaComposer.defaultConfig(model), - reference: false, - } satisfies ILlmSchema.IConfig as any, - }) as IResult, IOpenApiSchemaError>; - if (result.success === false) throw new Error("Invalid schema"); - return result.value; - }; diff --git a/test/src/features/llm/schema/validate_llm_schema_reference_escaped_description_of_namespace.ts b/test/src/features/llm/schema/validate_llm_schema_reference_escaped_description_of_namespace.ts deleted file mode 100644 index 481a2b77..00000000 --- a/test/src/features/llm/schema/validate_llm_schema_reference_escaped_description_of_namespace.ts +++ /dev/null @@ -1,96 +0,0 @@ -import { TestValidator } from "@nestia/e2e"; -import { - ILlmSchema, - IOpenApiSchemaError, - IResult, - OpenApi, -} from "@samchon/openapi"; -import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; -import typia, { IJsonSchemaCollection } from "typia"; - -export 
const test_chatgpt_schema_reference_escaped_description_of_namespace = - (): void => - validate_llm_schema_reference_escaped_description_of_namespace("chatgpt"); - -export const test_claude_schema_reference_escaped_description_of_namespace = - (): void => - validate_llm_schema_reference_escaped_description_of_namespace("claude"); - -export const test_gemini_schema_reference_escaped_description_of_namespace = - (): void => - validate_llm_schema_reference_escaped_description_of_namespace("gemini"); - -export const test_llm_v30_schema_reference_escaped_description_of_namespace = - (): void => - validate_llm_schema_reference_escaped_description_of_namespace("3.0"); - -export const test_llm_v31_schema_reference_escaped_description_of_namespace = - (): void => - validate_llm_schema_reference_escaped_description_of_namespace("3.1"); - -const validate_llm_schema_reference_escaped_description_of_namespace = < - Model extends ILlmSchema.Model, ->( - model: Model, -): void => { - const collection: IJsonSchemaCollection = typia.json.schemas< - [ - { - deep: Something.INested.IDeep; - nested: Something.INested; - something: Something; - }, - ] - >(); - const schema: ILlmSchema.IParameters = - composeSchema(model)(collection); - const deep: ILlmSchema = schema.properties.deep as ILlmSchema; - TestValidator.predicate("description")(() => { - const description: string | undefined = ( - deep as OpenApi.IJsonSchema.IObject - ).description; - return ( - !!description && - description.includes("Something interface") && - description.includes("Something nested interface") && - description.includes("Something nested and deep interface") - ); - }); -}; - -/** Something interface. */ -interface Something { - x: number; -} -namespace Something { - /** Something nested interface. */ - export interface INested { - y: number; - } - export namespace INested { - /** Something nested and deep interface. 
*/ - export interface IDeep { - z: number; - } - } -} - -const composeSchema = - (model: Model) => - (collection: IJsonSchemaCollection): ILlmSchema.IParameters => { - const result: IResult< - ILlmSchema.IParameters, - IOpenApiSchemaError - > = LlmSchemaComposer.parameters(model)({ - components: collection.components, - schema: typia.assert< - OpenApi.IJsonSchema.IObject | OpenApi.IJsonSchema.IReference - >(collection.schemas[0]), - config: { - ...LlmSchemaComposer.defaultConfig(model), - reference: false, - } satisfies ILlmSchema.IConfig as any, - }) as IResult, IOpenApiSchemaError>; - if (result.success === false) throw new Error("Invalid schema"); - return result.value; - }; diff --git a/test/src/features/llm/schema/validate_llm_schema_reference_escaped_description_of_property.ts b/test/src/features/llm/schema/validate_llm_schema_reference_escaped_description_of_property.ts deleted file mode 100644 index 80b5aff6..00000000 --- a/test/src/features/llm/schema/validate_llm_schema_reference_escaped_description_of_property.ts +++ /dev/null @@ -1,70 +0,0 @@ -import { TestValidator } from "@nestia/e2e"; -import { ILlmSchema, OpenApi } from "@samchon/openapi"; -import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; -import typia, { IJsonSchemaCollection, tags } from "typia"; - -export const test_chatgpt_schema_reference_escaped_description_of_property = - (): void => - validate_llm_schema_reference_escaped_description_of_property("chatgpt"); - -export const test_claude_schema_reference_escaped_description_of_property = - (): void => - validate_llm_schema_reference_escaped_description_of_property("claude"); - -export const test_gemini_schema_reference_escaped_description_of_property = - (): void => - validate_llm_schema_reference_escaped_description_of_property("gemini"); - -export const test_llm_v30_schema_reference_escaped_description_of_property = - (): void => - validate_llm_schema_reference_escaped_description_of_property("3.0"); - 
-export const test_llm_v31_schema_reference_escaped_description_of_property = - (): void => - validate_llm_schema_reference_escaped_description_of_property("3.1"); - -const validate_llm_schema_reference_escaped_description_of_property = < - Model extends ILlmSchema.Model, ->( - model: Model, -): void => { - const collection: IJsonSchemaCollection = typia.json.schemas<[IMember]>(); - const result = LlmSchemaComposer.parameters(model)({ - config: { - reference: false, - } as any, - components: collection.components, - schema: collection.schemas[0]! as OpenApi.IJsonSchema.IReference, - }); - TestValidator.predicate("description")(() => { - if (result.success === false) return false; - const description: string | undefined = ( - result.value.properties.hobby as OpenApi.IJsonSchema.IObject - ).description; - return ( - !!description?.includes("A hobby") && - !!description?.includes("The main hobby") && - !!description?.includes("The hobby type") - ); - }); -}; - -interface IMember { - id: string & tags.Format<"uuid">; - name: string; - age: number & - tags.Type<"uint32"> & - tags.Minimum<20> & - tags.ExclusiveMaximum<100>; - /** - * A hobby. - * - * The main hobby. - */ - hobby: IHobby; -} - -/** The hobby type. 
*/ -interface IHobby { - name: string; -} diff --git a/test/src/features/llm/schema/validate_llm_schema_separate_object_empty.ts b/test/src/features/llm/schema/validate_llm_schema_separate_object_empty.ts deleted file mode 100644 index f7c2ead3..00000000 --- a/test/src/features/llm/schema/validate_llm_schema_separate_object_empty.ts +++ /dev/null @@ -1,61 +0,0 @@ -import { TestValidator } from "@nestia/e2e"; -import { - ILlmSchema, - IOpenApiSchemaError, - IResult, - OpenApi, - OpenApiTypeChecker, -} from "@samchon/openapi"; -import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; -import typia, { IJsonSchemaCollection } from "typia"; - -export const test_chatgpt_schema_separate_object_empty = (): void => - validate_llm_schema_separate_object_empty("chatgpt"); - -export const test_claude_schema_separate_object_empty = (): void => - validate_llm_schema_separate_object_empty("claude"); - -export const test_gemini_schema_separate_object_empty = (): void => - validate_llm_schema_separate_object_empty("gemini"); - -export const test_llm_v30_schema_separate_object_empty = (): void => - validate_llm_schema_separate_object_empty("3.0"); - -export const test_llm_v31_schema_separate_object_empty = (): void => - validate_llm_schema_separate_object_empty("3.1"); - -const validate_llm_schema_separate_object_empty = < - Model extends ILlmSchema.Model, ->( - model: Model, -): void => { - TestValidator.equals("separated")( - LlmSchemaComposer.separateParameters(model)({ - predicate: ((schema: OpenApi.IJsonSchema) => - OpenApiTypeChecker.isInteger(schema)) as any, - parameters: schema(model)(typia.json.schemas<[{}]>()) as any, - }), - )({ - llm: schema(model)(typia.json.schemas<[{}]>()) as any, - human: null, - }); -}; - -const schema = - (model: Model) => - (collection: IJsonSchemaCollection): ILlmSchema.IParameters => { - const result: IResult< - ILlmSchema.IParameters, - IOpenApiSchemaError - > = LlmSchemaComposer.parameters(model)({ - components: 
collection.components, - schema: typia.assert< - OpenApi.IJsonSchema.IObject | OpenApi.IJsonSchema.IReference - >(collection.schemas[0]), - config: LlmSchemaComposer.defaultConfig( - model, - ) satisfies ILlmSchema.IConfig as any, - }) as IResult, IOpenApiSchemaError>; - if (result.success === false) throw new Error("Invalid schema"); - return result.value; - }; diff --git a/test/src/features/llm/schema/validate_llm_schema_separate_string.ts b/test/src/features/llm/schema/validate_llm_schema_separate_string.ts deleted file mode 100644 index 43f8d18e..00000000 --- a/test/src/features/llm/schema/validate_llm_schema_separate_string.ts +++ /dev/null @@ -1,106 +0,0 @@ -import { TestValidator } from "@nestia/e2e"; -import { - ILlmSchema, - IOpenApiSchemaError, - IResult, - OpenApi, -} from "@samchon/openapi"; -import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; -import typia, { IJsonSchemaCollection, tags } from "typia"; - -export const test_chatgpt_schema_separate_string = (): void => - validate_llm_schema_separate_string("chatgpt", false); - -export const test_claude_schema_separate_string = (): void => - validate_llm_schema_separate_string("claude", true); - -export const test_gemini_schema_separate_string = (): void => - validate_llm_schema_separate_string("gemini", true); - -export const test_llm_v30_schema_separate_string = (): void => { - validate_llm_schema_separate_string("3.0", false); - validate_llm_schema_separate_string("3.0", true); -}; - -export const test_llm_v31_schema_separate_string = (): void => { - validate_llm_schema_separate_string("3.1", false); - validate_llm_schema_separate_string("3.1", true); -}; - -const validate_llm_schema_separate_string = ( - model: Model, - constraint: boolean, -): void => { - const separator = (schema: ILlmSchema.IParameters) => - LlmSchemaComposer.separateParameters(model)({ - predicate: (s) => - LlmSchemaComposer.typeChecker(model).isString( - s as OpenApi.IJsonSchema.IString, - ) && - 
(constraint - ? (s as OpenApi.IJsonSchema.IString).contentMediaType !== undefined - : (s as OpenApi.IJsonSchema.IString).description?.includes( - "@contentMediaType", - ) === true), - parameters: schema as any, - }); - const plain: ILlmSchema.IParameters = schema( - model, - constraint, - )( - typia.json.schemas< - [ - { - name: string; - }, - ] - >(), - ); - const upload: ILlmSchema.IParameters = schema( - model, - constraint, - )( - typia.json.schemas< - [ - { - file: string & tags.ContentMediaType<"image/*">; - }, - ] - >(), - ); - TestValidator.equals("plain")(separator(plain))({ - llm: plain, - human: null, - }); - TestValidator.equals("upload")(separator(upload))({ - llm: { - type: "object", - properties: {}, - additionalProperties: false, - required: [], - $defs: {}, - }, - human: upload, - }); -}; - -const schema = - (model: Model, constraint: boolean) => - (collection: IJsonSchemaCollection): ILlmSchema.IParameters => { - const result: IResult< - ILlmSchema.IParameters, - IOpenApiSchemaError - > = LlmSchemaComposer.parameters(model)({ - components: collection.components, - schema: typia.assert< - OpenApi.IJsonSchema.IObject | OpenApi.IJsonSchema.IReference - >(collection.schemas[0]), - config: { - ...LlmSchemaComposer.defaultConfig(model), - reference: true, - constraint, - } satisfies ILlmSchema.IConfig as any, - }) as IResult, IOpenApiSchemaError>; - if (result.success === false) throw new Error("Invalid schema"); - return result.value; - }; diff --git a/test/src/features/llm/schema/validate_llm_type_checker_cover_any.ts b/test/src/features/llm/schema/validate_llm_type_checker_cover_any.ts deleted file mode 100644 index 3685f223..00000000 --- a/test/src/features/llm/schema/validate_llm_type_checker_cover_any.ts +++ /dev/null @@ -1,60 +0,0 @@ -import { TestValidator } from "@nestia/e2e"; -import { ILlmSchema, OpenApi } from "@samchon/openapi"; -import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; -import typia, { 
IJsonSchemaCollection } from "typia"; - -export const test_chatgpt_type_checker_cover_any = (): void => - validate_llm_type_checker_cover_any("chatgpt"); - -export const test_claude_type_checker_cover_any = (): void => - validate_llm_type_checker_cover_any("claude"); - -export const test_gemini_type_checker_cover_any = (): void => - validate_llm_type_checker_cover_any("gemini"); - -export const test_llm_v30_type_checker_cover_any = (): void => - validate_llm_type_checker_cover_any("3.0"); - -export const test_llm_v31_type_checker_cover_any = (): void => - validate_llm_type_checker_cover_any("3.1"); - -const validate_llm_type_checker_cover_any = ( - model: Model, -) => { - const collection: IJsonSchemaCollection = typia.json.schemas<[IBasic]>(); - const result = LlmSchemaComposer.parameters(model)({ - config: LlmSchemaComposer.defaultConfig(model) as any, - components: collection.components, - schema: collection.schemas[0] as OpenApi.IJsonSchema.IReference, - }); - if (result.success === false) - throw new Error(`Failed to compose ${model} parameters.`); - - const parameters = result.value; - const check = (x: ILlmSchema, y: ILlmSchema): boolean => - model === "3.0" || model === "gemini" - ? 
(LlmSchemaComposer.typeChecker(model).covers as any)(x, y) - : (LlmSchemaComposer.typeChecker(model).covers as any)({ - x, - y, - $defs: (parameters as any).$defs, - }); - TestValidator.equals("any covers (string | null)")(true)( - check( - parameters.properties.any as ILlmSchema, - parameters.properties.string_or_null as ILlmSchema, - ), - ); - TestValidator.equals("any covers (string | undefined)")(true)( - check( - parameters.properties.any as ILlmSchema, - parameters.properties.string_or_undefined as ILlmSchema, - ), - ); -}; - -interface IBasic { - any: any; - string_or_null: null | string; - string_or_undefined: string | undefined; -} diff --git a/test/src/features/llm/schema/validate_llm_type_checker_cover_array.ts b/test/src/features/llm/schema/validate_llm_type_checker_cover_array.ts deleted file mode 100644 index f007e503..00000000 --- a/test/src/features/llm/schema/validate_llm_type_checker_cover_array.ts +++ /dev/null @@ -1,208 +0,0 @@ -import { TestValidator } from "@nestia/e2e"; -import { ILlmSchema, OpenApi } from "@samchon/openapi"; -import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; -import typia, { IJsonSchemaCollection } from "typia"; - -export const test_chatgpt_type_checker_cover_array = (): void => - validate_llm_type_checker_cover_array("chatgpt"); - -export const test_claude_type_checker_cover_array = (): void => - validate_llm_type_checker_cover_array("claude"); - -export const test_gemini_type_checker_cover_array = (): void => - validate_llm_type_checker_cover_array("gemini"); - -export const test_llm_v30_type_checker_cover_array = (): void => - validate_llm_type_checker_cover_array("3.0"); - -export const test_llm_v31_type_checker_cover_array = (): void => - validate_llm_type_checker_cover_array("3.1"); - -const validate_llm_type_checker_cover_array = ( - model: Model, -) => { - const collection: IJsonSchemaCollection = - typia.json.schemas<[Plan2D, Plan3D, Box2D, Box3D]>(); - const components: 
OpenApi.IComponents = collection.components as any; - const plan2D: OpenApi.IJsonSchema = components.schemas!.Plan2D; - const plan3D: OpenApi.IJsonSchema = components.schemas!.Plan3D; - const box2D: OpenApi.IJsonSchema = components.schemas!.Box2D; - const box3D: OpenApi.IJsonSchema = components.schemas!.Box3D; - - const $defs = {}; - const check = (x: OpenApi.IJsonSchema, y: OpenApi.IJsonSchema): boolean => { - const [a, b] = [x, y].map((schema) => { - const result = LlmSchemaComposer.schema(model)({ - config: LlmSchemaComposer.defaultConfig(model) as any, - components: collection.components, - schema: schema, - $defs, - }); - if (result.success === false) - throw new Error(`Failed to compose ${model} schema.`); - return result.value; - }); - return model === "3.0" - ? (LlmSchemaComposer.typeChecker(model).covers as any)(a, b) - : (LlmSchemaComposer.typeChecker(model).covers as any)({ - x: a, - y: b, - $defs, - }); - }; - - TestValidator.equals(model + " Plan3D[] covers Plan2D[]")(true)( - check({ type: "array", items: plan3D }, { type: "array", items: plan2D }), - ); - TestValidator.equals(model + " Box3D[] covers Box2D[]")(true)( - check({ type: "array", items: box3D }, { type: "array", items: box2D }), - ); - if (model !== "gemini") - TestValidator.equals( - model + " Array covers Array", - )(true)( - check( - { - type: "array", - items: { - oneOf: [plan3D, box3D], - }, - }, - { - type: "array", - items: { - oneOf: [plan2D, box2D], - }, - }, - ), - ); - if (model !== "gemini") - TestValidator.equals(model + " (Plan3D|Box3D)[] covers (Plan2D|Box2D)[]")( - true, - )( - check( - { - oneOf: [ - { type: "array", items: plan3D }, - { type: "array", items: box3D }, - ], - }, - { - oneOf: [ - { type: "array", items: plan2D }, - { type: "array", items: box2D }, - ], - }, - ), - ); - - TestValidator.equals(model + " Plan2D[] can't cover Plan3D[]")(false)( - check({ type: "array", items: plan2D }, { type: "array", items: plan3D }), - ); - TestValidator.equals(model + " 
Box2D[] can't cover Box3D[]")(false)( - check({ type: "array", items: box2D }, { type: "array", items: box3D }), - ); - if (model !== "gemini") - if (model !== "gemini") - TestValidator.equals( - "Array can't cover Array", - )(false)( - check( - { - type: "array", - items: { - oneOf: [plan2D, box2D], - }, - }, - { - type: "array", - items: { - oneOf: [plan3D, box3D], - }, - }, - ), - ); - if (model !== "gemini") - TestValidator.equals( - model + " (Plan2D[]|Box2D[]) can't cover (Plan3D[]|Box3D[])", - )(false)( - check( - { - oneOf: [ - { type: "array", items: plan2D }, - { type: "array", items: box2D }, - ], - }, - { - oneOf: [ - { type: "array", items: plan3D }, - { type: "array", items: box3D }, - ], - }, - ), - ); - if (model !== "gemini") - TestValidator.equals(model + " Plan3D[] can't cover (Plan2D|Box2D)[]")( - false, - )( - check( - { type: "array", items: plan3D }, - { - oneOf: [ - { type: "array", items: plan2D }, - { type: "array", items: box2D }, - ], - }, - ), - ); - if (model !== "gemini") - TestValidator.equals(model + " Box3D[] can't cover Array")( - false, - )( - check( - { type: "array", items: box3D }, - { - type: "array", - items: { - oneOf: [plan2D, box2D], - }, - }, - ), - ); -}; - -type Plan2D = { - center: Point2D; - size: Point2D; - geometries: Geometry2D[]; -}; -type Plan3D = { - center: Point3D; - size: Point3D; - geometries: Geometry3D[]; -}; -type Geometry3D = { - position: Point3D; - scale: Point3D; -}; -type Geometry2D = { - position: Point2D; - scale: Point2D; -}; -type Point2D = { - x: number; - y: number; -}; -type Point3D = { - x: number; - y: number; - z: number; -}; -type Box2D = { - size: Point2D; - nested: Point2D[]; -}; -type Box3D = { - size: Point3D; - nested: Point3D[]; -}; diff --git a/test/src/features/mcp/test_mcp_application.ts b/test/src/features/mcp/test_mcp_application.ts new file mode 100644 index 00000000..d2fdc1b2 --- /dev/null +++ b/test/src/features/mcp/test_mcp_application.ts @@ -0,0 +1,70 @@ +import { 
TestValidator } from "@nestia/e2e"; +import { + HttpLlm, + IHttpLlmApplication, + ILlmSchema, + IMcpLlmApplication, + IMcpLlmFunction, + LlmTypeChecker, + McpLlm, + OpenApi, +} from "@samchon/openapi"; +import fs from "fs"; + +import { TestGlobal } from "../../TestGlobal"; + +export const test_mcp_application = async (): Promise => { + const http: IHttpLlmApplication = HttpLlm.application({ + document: OpenApi.convert( + await fetch( + "https://raw.githubusercontent.com/samchon/shopping-backend/refs/heads/master/packages/api/swagger.json", + ).then((r) => r.json()), + ), + }); + const mcp: IMcpLlmApplication = McpLlm.application({ + tools: http.functions.map((f) => ({ + name: f.name, + description: f.description, + inputSchema: f.parameters, + })), + }); + TestValidator.equals("functions")(http.functions.length)( + mcp.functions.length, + ); + TestValidator.equals("errors")(0)(mcp.errors.length); + + http.functions.forEach((x) => { + const parameters: ILlmSchema.IParameters = { ...x.parameters }; + const visited: Set = new Set(); + LlmTypeChecker.visit({ + closure: (schema: any) => { + if (typeof schema.$ref === "string") + visited.add(schema.$ref.split("/").pop()!); + }, + schema: parameters, + $defs: parameters.$defs, + }); + (parameters as any).$defs = Object.fromEntries( + Object.entries((parameters as any).$defs).filter(([key]) => + visited.has(key), + ), + ); + const y: IMcpLlmFunction | undefined = mcp.functions.find( + (y) => y.name === x.name, + ); + TestValidator.equals( + `parameters: ${x.name}`, + (key) => + key === "description" || + key === "discriminator" || + key === "x-discriminator", + )(parameters)((y?.parameters as any) ?? 
{}); + }); + + if (process.argv.includes("--file")) + await fs.promises.writeFile( + `${TestGlobal.ROOT}/examples/mcp/application.json`, + JSON.stringify(mcp, null, 2), + "utf8", + ); +}; diff --git a/test/src/features/mcp/test_mcp_schema_ref.ts b/test/src/features/mcp/test_mcp_schema_ref.ts index 2ed35066..16807ded 100644 --- a/test/src/features/mcp/test_mcp_schema_ref.ts +++ b/test/src/features/mcp/test_mcp_schema_ref.ts @@ -1,25 +1,20 @@ import { TestValidator } from "@nestia/e2e"; import { - ChatGptTypeChecker, HttpLlm, IHttpLlmApplication, IHttpLlmFunction, IMcpLlmApplication, - LlamaTypeChecker, + LlmTypeChecker, McpLlm, } from "@samchon/openapi"; export const test_mcp_schema_ref = async (): Promise => { - const http: IHttpLlmApplication<"chatgpt"> = HttpLlm.application({ - model: "chatgpt", + const http: IHttpLlmApplication = HttpLlm.application({ document: await fetch( "https://raw.githubusercontent.com/samchon/shopping-backend/refs/heads/master/packages/api/swagger.json", ).then((r) => r.json()), - options: { - reference: true, - }, }); - const func: IHttpLlmFunction<"chatgpt"> | undefined = http.functions.find( + const func: IHttpLlmFunction | undefined = http.functions.find( (x) => Object.keys(x.parameters.$defs).length !== 0 && Object.keys(x.parameters.properties).length !== 0 && @@ -30,17 +25,16 @@ export const test_mcp_schema_ref = async (): Promise => { if (func === undefined) throw new Error("Function not found"); const visited: Set = new Set(); - ChatGptTypeChecker.visit({ + LlmTypeChecker.visit({ closure: (schema) => { - if (ChatGptTypeChecker.isReference(schema)) + if (LlmTypeChecker.isReference(schema)) visited.add(schema.$ref.split("/").pop()!); }, $defs: func.parameters.$defs, schema: func.parameters, }); - const mcp: IMcpLlmApplication<"chatgpt"> = McpLlm.application({ - model: "chatgpt", + const mcp: IMcpLlmApplication = McpLlm.application({ tools: [ { name: func.name, @@ -48,9 +42,6 @@ export const test_mcp_schema_ref = async (): Promise => 
{ inputSchema: func.parameters, }, ], - options: { - reference: true, - }, }); TestValidator.equals( "schema", @@ -64,12 +55,12 @@ export const test_mcp_schema_ref = async (): Promise => { }; const isEmptyBody = ($defs: Record, input: any): boolean => { - if (LlamaTypeChecker.isReference(input)) { + if (LlmTypeChecker.isReference(input)) { const name: string = input.$ref.split("/").pop()!; return $defs[name] && isEmptyBody($defs, $defs[name]); } return ( - LlamaTypeChecker.isObject(input) && + LlmTypeChecker.isObject(input) && Object.keys(input.properties ?? {}).length === 0 ); }; diff --git a/test/src/features/mcp/validate_mcp_application.ts b/test/src/features/mcp/validate_mcp_application.ts deleted file mode 100644 index bd1a77ad..00000000 --- a/test/src/features/mcp/validate_mcp_application.ts +++ /dev/null @@ -1,94 +0,0 @@ -import { TestValidator } from "@nestia/e2e"; -import { HttpLlm, ILlmSchema, McpLlm, OpenApi } from "@samchon/openapi"; -import fs from "fs"; -import { Singleton, VariadicSingleton } from "tstl"; - -import { LlmSchemaComposer } from "../../../../lib/composers/LlmSchemaComposer"; -import { TestGlobal } from "../../TestGlobal"; - -export const test_mcp_application_of_chatgpt = () => - validate_mcp_application("chatgpt"); - -export const test_mcp_application_of_claude = () => - validate_mcp_application("claude"); - -export const test_mcp_application_of_gemini = () => - validate_mcp_application("gemini"); - -export const test_mcp_application_of_v30 = () => - validate_mcp_application("3.0"); - -export const test_mcp_application_of_v31 = () => - validate_mcp_application("3.1"); - -const validate_mcp_application = async ( - model: Model, -): Promise => { - const llm = await getApplication.get(model); - const mcp = McpLlm.application({ - model, - tools: llm.functions.map((f) => ({ - name: f.name, - description: f.description, - inputSchema: f.parameters, - })), - options: { - reference: true, - } as any, - }); - 
TestValidator.equals("functions")(llm.functions.length)(mcp.functions.length); - TestValidator.equals("errors")(0)(mcp.errors.length); - - llm.functions.forEach((x) => { - const parameters = { ...x.parameters }; - if (model !== "3.0") { - const visited: Set = new Set(); - LlmSchemaComposer.typeChecker(model).visit({ - closure: (schema: any) => { - if (typeof schema.$ref === "string") - visited.add(schema.$ref.split("/").pop()!); - }, - schema: parameters, - $defs: (parameters as any).$defs, - } as any); - (parameters as any).$defs = Object.fromEntries( - Object.entries((parameters as any).$defs).filter(([key]) => - visited.has(key), - ), - ); - } - const y = mcp.functions.find((y) => y.name === x.name); - TestValidator.equals( - `parameters: ${x.name}`, - (key) => - key === "description" || - key === "discriminator" || - key === "x-discriminator", - )(parameters)((y?.parameters as any) ?? {}); - }); - - if (process.argv.includes("--file")) - await fs.promises.writeFile( - `${TestGlobal.ROOT}/examples/mcp/${model}.application.json`, - JSON.stringify(mcp, null, 2), - "utf8", - ); -}; - -const getApplication = new VariadicSingleton(async (model: ILlmSchema.Model) => - HttpLlm.application({ - model, - document: await getDocument.get(), - options: { - reference: true, - } as any, - }), -); - -const getDocument = new Singleton(async () => - OpenApi.convert( - await fetch( - "https://raw.githubusercontent.com/samchon/shopping-backend/refs/heads/master/packages/api/swagger.json", - ).then((r) => r.json()), - ), -); diff --git a/test/src/utils/LlmApplicationFactory.ts b/test/src/utils/LlmApplicationFactory.ts index 672b22b0..48356c1e 100644 --- a/test/src/utils/LlmApplicationFactory.ts +++ b/test/src/utils/LlmApplicationFactory.ts @@ -4,6 +4,7 @@ import { ILlmSchema, IOpenApiSchemaError, IResult, + OpenApi, } from "@samchon/openapi"; import { LlmSchemaComposer } from "@samchon/openapi/lib/composers/LlmSchemaComposer"; import { IJsonSchemaApplication } from "typia"; @@ 
-11,55 +12,51 @@ import { IJsonSchemaApplication } from "typia"; import { OpenApiValidator } from "../../../lib/utils/OpenApiValidator"; export namespace LlmApplicationFactory { - export const convert = (props: { - model: Model; + export const convert = (props: { application: IJsonSchemaApplication; - config?: ILlmSchema.IConfig; - }): ILlmApplication => { - const config: ILlmSchema.IConfig = - props.config ?? LlmSchemaComposer.defaultConfig(props.model); + config?: Partial; + }): ILlmApplication => { + const config: ILlmSchema.IConfig = LlmSchemaComposer.getConfig( + props.config, + ); return { - model: props.model, functions: props.application.functions.map((func) => convertFunction({ - model: props.model, - options: config, + config, components: props.application.components, function: func, }), ), - options: config, + config: { + ...config, + separate: null, + validate: null, + }, }; }; - const convertFunction = (props: { - model: Model; - options: ILlmSchema.IConfig; + const convertFunction = (props: { + config: ILlmSchema.IConfig; components: IJsonSchemaApplication.IComponents; function: IJsonSchemaApplication.IFunction; - }): ILlmFunction => { - const parameters: IResult< - ILlmSchema.IParameters, - IOpenApiSchemaError - > = LlmSchemaComposer.parameters(props.model)({ - config: props.options as any, - components: props.components, - schema: props.function.parameters[0].schema as any, - }) satisfies IResult< - ILlmSchema.IParameters, - IOpenApiSchemaError - > as IResult, IOpenApiSchemaError>; + }): ILlmFunction => { + const parameters: IResult = + LlmSchemaComposer.parameters({ + config: props.config, + components: props.components, + schema: props.function.parameters[0].schema as + | OpenApi.IJsonSchema.IObject + | OpenApi.IJsonSchema.IReference, + }); if (parameters.success === false) { console.log(JSON.stringify(parameters.error, null, 2)); throw new Error("Failed to compose parameters schema."); } - const out = ( - schema: ILlmSchema | undefined, - ): 
ILlmFunction => ({ + const out = (schema: ILlmSchema | undefined): ILlmFunction => ({ name: props.function.name, description: props.function.description, - parameters: parameters.value as any, - output: schema as any, + parameters: parameters.value, + output: schema, validate: OpenApiValidator.create({ components: props.components, schema: props.function.parameters[0].schema, @@ -68,22 +65,17 @@ export namespace LlmApplicationFactory { }); if (props.function.output === undefined) return out(undefined); - const output: IResult< - ILlmSchema, - IOpenApiSchemaError - > = LlmSchemaComposer.schema(props.model)({ - config: props.options as any, - components: props.components, - schema: props.function.output.schema, - $defs: (parameters.value as any).$defs, - }) satisfies IResult as IResult< - ILlmSchema, - IOpenApiSchemaError - >; + const output: IResult = + LlmSchemaComposer.schema({ + config: props.config, + components: props.components, + schema: props.function.output.schema, + $defs: parameters.value.$defs, + }); if (output.success === false) { - console.log(JSON.stringify(output.error), null, 2); + console.log(JSON.stringify(output.error, null, 2)); throw new Error("Failed to compose output schema."); } - return out(output.value as any); + return out(output.value); }; } diff --git a/test/src/utils/LlmFunctionCaller.ts b/test/src/utils/LlmFunctionCaller.ts index 27ad13d0..93ac3eb3 100644 --- a/test/src/utils/LlmFunctionCaller.ts +++ b/test/src/utils/LlmFunctionCaller.ts @@ -6,21 +6,16 @@ import { TestGlobal } from "../TestGlobal"; import { ILlmTextPrompt } from "../dto/ILlmTextPrompt"; export namespace LlmFunctionCaller { - export interface IProps { + export interface IProps { vendor: string; - model: Model; - function: ILlmFunction; + function: ILlmFunction; texts: ILlmTextPrompt[]; handleCompletion: (input: any) => Promise; - handleParameters?: ( - parameters: ILlmSchema.ModelParameters[Model], - ) => Promise; + handleParameters?: (parameters: 
ILlmSchema.IParameters) => Promise; strict?: boolean; } - export const test = async ( - props: IProps, - ) => { + export const test = async (props: IProps) => { if ( TestGlobal.env.OPENAI_API_KEY === undefined || TestGlobal.env.OPENROUTER_API_KEY === undefined @@ -41,8 +36,8 @@ export namespace LlmFunctionCaller { }); }; - const step = async ( - props: IProps, + const step = async ( + props: IProps, previous?: IValidation.IFailure, ): Promise> => { const client: OpenAI = new OpenAI({