diff -pruN 0.26.4-3/.gitignore 0.45.0-1/.gitignore
--- 0.26.4-3/.gitignore	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/.gitignore	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,9 @@
+.tox/
+.*_cache
+__pycache__
+coverage.xml
+.coverage
+.idea/
+.vscode/
+site/
+tests/cli_doc/.cli_doc_collection.json
diff -pruN 0.26.4-3/PKG-INFO 0.45.0-1/PKG-INFO
--- 0.26.4-3/PKG-INFO	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/PKG-INFO	2025-12-19 19:37:31.000000000 +0000
@@ -1,52 +1,60 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.4
 Name: datamodel-code-generator
-Version: 0.26.4
+Version: 0.45.0
 Summary: Datamodel Code Generator
-Home-page: https://github.com/koxudaxi/datamodel-code-generator
-License: MIT
-Author: Koudai Aono
-Author-email: koxudaxi@gmail.com
-Requires-Python: >=3.8,<4.0
+Project-URL: Homepage, https://github.com/koxudaxi/datamodel-code-generator
+Project-URL: Source, https://github.com/koxudaxi/datamodel-code-generator
+Author-email: Koudai Aono <koxudaxi@gmail.com>
+License-Expression: MIT
+License-File: LICENSE
 Classifier: Development Status :: 4 - Beta
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Natural Language :: English
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.8
+Classifier: Programming Language :: Python :: 3 :: Only
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
+Classifier: Programming Language :: Python :: 3.14
 Classifier: Programming Language :: Python :: Implementation :: CPython
+Requires-Python: >=3.9
+Requires-Dist: argcomplete<4,>=2.10.1
+Requires-Dist: black>=19.10b0
+Requires-Dist: genson<2,>=1.2.1
+Requires-Dist: inflect<8,>=4.1
+Requires-Dist: isort<8,>=4.3.21
+Requires-Dist: jinja2<4,>=2.10.1
+Requires-Dist: packaging
+Requires-Dist: pydantic>=1.5
+Requires-Dist: pyyaml>=6.0.1
+Requires-Dist: tomli<3,>=2.2.1; python_version <= '3.11'
+Provides-Extra: all
+Requires-Dist: graphql-core>=3.2.3; extra == 'all'
+Requires-Dist: httpx>=0.24.1; extra == 'all'
+Requires-Dist: openapi-spec-validator<0.8,>=0.2.8; extra == 'all'
+Requires-Dist: prance>=0.18.2; extra == 'all'
+Requires-Dist: pysnooper<2,>=0.4.1; extra == 'all'
+Requires-Dist: ruff>=0.9.10; extra == 'all'
+Requires-Dist: watchfiles>=1.1; extra == 'all'
 Provides-Extra: debug
+Requires-Dist: pysnooper<2,>=0.4.1; extra == 'debug'
 Provides-Extra: graphql
+Requires-Dist: graphql-core>=3.2.3; extra == 'graphql'
 Provides-Extra: http
+Requires-Dist: httpx>=0.24.1; extra == 'http'
+Provides-Extra: ruff
+Requires-Dist: ruff>=0.9.10; extra == 'ruff'
 Provides-Extra: validation
-Requires-Dist: PySnooper (>=0.4.1,<2.0.0) ; extra == "debug"
-Requires-Dist: argcomplete (>=1.10,<4.0)
-Requires-Dist: black (>=19.10b0)
-Requires-Dist: genson (>=1.2.1,<2.0)
-Requires-Dist: graphql-core (>=3.2.3,<4.0.0) ; extra == "graphql"
-Requires-Dist: httpx ; extra == "http"
-Requires-Dist: inflect (>=4.1.0,<6.0)
-Requires-Dist: isort (>=4.3.21,<6.0)
-Requires-Dist: jinja2 (>=2.10.1,<4.0)
-Requires-Dist: openapi-spec-validator (>=0.2.8,<0.7.0) ; extra == "validation"
-Requires-Dist: packaging
-Requires-Dist: prance (>=0.18.2) ; extra == "validation"
-Requires-Dist: pydantic[email] (>=1.10.0,!=2.0.0,!=2.0.1,<3.0,!=2.4.0) ; python_version >= "3.12" and python_version < "4.0"
-Requires-Dist: pydantic[email] (>=1.10.0,<3.0,!=2.4.0) ; python_version >= "3.11" and python_version < "4.0"
-Requires-Dist: pydantic[email] (>=1.5.1,<3.0,!=2.4.0) ; python_version < "3.10"
-Requires-Dist: pydantic[email] (>=1.9.0,<3.0,!=2.4.0) ; python_version >= "3.10" and python_version < "3.11"
-Requires-Dist: pyyaml (>=6.0.1)
-Requires-Dist: toml (>=0.10.0,<1.0.0) ; python_version < "3.11"
-Project-URL: Repository, https://github.com/koxudaxi/datamodel-code-generator
+Requires-Dist: openapi-spec-validator<0.8,>=0.2.8; extra == 'validation'
+Requires-Dist: prance>=0.18.2; extra == 'validation'
+Provides-Extra: watch
+Requires-Dist: watchfiles>=1.1; extra == 'watch'
 Description-Content-Type: text/markdown
 
 # datamodel-code-generator
 
-This code generator creates [pydantic v1 and v2](https://docs.pydantic.dev/) model, [dataclasses.dataclass](https://docs.python.org/3/library/dataclasses.html), [typing.TypedDict](https://docs.python.org/3/library/typing.html#typing.TypedDict) 
-and [msgspec.Struct](https://github.com/jcrist/msgspec) from an openapi file and others.
+🚀 Generate Python data models from schema definitions in seconds.
 
 [![PyPI version](https://badge.fury.io/py/datamodel-code-generator.svg)](https://pypi.python.org/pypi/datamodel-code-generator)
 [![Conda-forge](https://img.shields.io/conda/v/conda-forge/datamodel-code-generator)](https://anaconda.org/conda-forge/datamodel-code-generator)
@@ -54,527 +62,240 @@ and [msgspec.Struct](https://github.com/
 [![PyPI - Python Version](https://img.shields.io/pypi/pyversions/datamodel-code-generator)](https://pypi.python.org/pypi/datamodel-code-generator)
 [![codecov](https://codecov.io/gh/koxudaxi/datamodel-code-generator/graph/badge.svg?token=plzSSFb9Li)](https://codecov.io/gh/koxudaxi/datamodel-code-generator)
 ![license](https://img.shields.io/github/license/koxudaxi/datamodel-code-generator.svg)
-[![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff)
 [![Pydantic v1](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/pydantic/pydantic/main/docs/badge/v1.json)](https://pydantic.dev)
 [![Pydantic v2](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/pydantic/pydantic/main/docs/badge/v2.json)](https://pydantic.dev)
 
-## Help
-See [documentation](https://koxudaxi.github.io/datamodel-code-generator) for more details.
+## ✨ What it does
+
+- 📄 Converts **OpenAPI 3**, **JSON Schema**, **GraphQL**, and raw data (JSON/YAML/CSV) into Python models
+- 🎯 Generates **Pydantic v1/v2**, **dataclasses**, **TypedDict**, or **msgspec** output
+- 🔗 Handles complex schemas: `$ref`, `allOf`, `oneOf`, `anyOf`, enums, and nested types
+- ✅ Produces type-safe, validated code ready for your IDE and type checker
+
+---
+
+## 📖 Documentation
+
+**👉 [koxudaxi.github.io/datamodel-code-generator](https://koxudaxi.github.io/datamodel-code-generator)**
+
+- 🖥️ [CLI Reference](https://koxudaxi.github.io/datamodel-code-generator/cli-reference/) - All command-line options
+- ⚙️ [pyproject.toml](https://koxudaxi.github.io/datamodel-code-generator/pyproject_toml/) - Configuration file
+- 🔄 [CI/CD Integration](https://koxudaxi.github.io/datamodel-code-generator/ci-cd/) - GitHub Actions, pre-commit hooks
+- 🚀 [One-liner Usage](https://koxudaxi.github.io/datamodel-code-generator/oneliner/) - uvx, pipx, clipboard integration
+- ❓ [FAQ](https://koxudaxi.github.io/datamodel-code-generator/faq/) - Common questions
+
+---
+
+## 📦 Installation
+
+```bash
+pip install datamodel-code-generator
+```
+
+<details>
+<summary>Other installation methods</summary>
+
+**uv:**
+```bash
+uv add datamodel-code-generator
+```
+
+**conda:**
+```bash
+conda install -c conda-forge datamodel-code-generator
+```
 
-## Quick Installation
+**With HTTP support** (for resolving remote `$ref`):
+```bash
+pip install 'datamodel-code-generator[http]'
+```
 
-To install `datamodel-code-generator`:
+**With GraphQL support:**
 ```bash
-$ pip install datamodel-code-generator
+pip install 'datamodel-code-generator[graphql]'
 ```
 
-## Simple Usage
-You can generate models from a local file.
+**Docker:**
 ```bash
-$ datamodel-codegen --input api.yaml --output model.py
+docker pull koxudaxi/datamodel-code-generator
+```
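+
+A minimal sketch of running the image, assuming its entrypoint is `datamodel-codegen` and that you mount the host directory so the container can read the input and write the output:
+
+```bash
+docker run --rm -v "$PWD:/work" koxudaxi/datamodel-code-generator \
+  --input /work/schema.json --input-file-type jsonschema --output /work/model.py
+```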
+
+</details>
+
+---
+
+## 🏃 Quick Start
+
+```bash
+datamodel-codegen --input schema.json --input-file-type jsonschema --output-model-type pydantic_v2.BaseModel --output model.py
 ```
 
 <details>
-<summary>api.yaml</summary>
+<summary>📄 schema.json (input)</summary>
 
-```yaml
-openapi: "3.0.0"
-info:
-  version: 1.0.0
-  title: Swagger Petstore
-  license:
-    name: MIT
-servers:
-  - url: http://petstore.swagger.io/v1
-paths:
-  /pets:
-    get:
-      summary: List all pets
-      operationId: listPets
-      tags:
-        - pets
-      parameters:
-        - name: limit
-          in: query
-          description: How many items to return at one time (max 100)
-          required: false
-          schema:
-            type: integer
-            format: int32
-      responses:
-        '200':
-          description: A paged array of pets
-          headers:
-            x-next:
-              description: A link to the next page of responses
-              schema:
-                type: string
-          content:
-            application/json:
-              schema:
-                $ref: "#/components/schemas/Pets"
-        default:
-          description: unexpected error
-          content:
-            application/json:
-              schema:
-                $ref: "#/components/schemas/Error"
-                x-amazon-apigateway-integration:
-                  uri:
-                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
-                  passthroughBehavior: when_no_templates
-                  httpMethod: POST
-                  type: aws_proxy
-    post:
-      summary: Create a pet
-      operationId: createPets
-      tags:
-        - pets
-      responses:
-        '201':
-          description: Null response
-        default:
-          description: unexpected error
-          content:
-            application/json:
-              schema:
-                $ref: "#/components/schemas/Error"
-                x-amazon-apigateway-integration:
-                  uri:
-                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
-                  passthroughBehavior: when_no_templates
-                  httpMethod: POST
-                  type: aws_proxy
-  /pets/{petId}:
-    get:
-      summary: Info for a specific pet
-      operationId: showPetById
-      tags:
-        - pets
-      parameters:
-        - name: petId
-          in: path
-          required: true
-          description: The id of the pet to retrieve
-          schema:
-            type: string
-      responses:
-        '200':
-          description: Expected response to a valid request
-          content:
-            application/json:
-              schema:
-                $ref: "#/components/schemas/Pets"
-        default:
-          description: unexpected error
-          content:
-            application/json:
-              schema:
-                $ref: "#/components/schemas/Error"
-    x-amazon-apigateway-integration:
-      uri:
-        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
-      passthroughBehavior: when_no_templates
-      httpMethod: POST
-      type: aws_proxy
-components:
-  schemas:
-    Pet:
-      required:
-        - id
-        - name
-      properties:
-        id:
-          type: integer
-          format: int64
-        name:
-          type: string
-        tag:
-          type: string
-    Pets:
-      type: array
-      items:
-        $ref: "#/components/schemas/Pet"
-    Error:
-      required:
-        - code
-        - message
-      properties:
-        code:
-          type: integer
-          format: int32
-        message:
-          type: string
-    apis:
-      type: array
-      items:
-        type: object
-        properties:
-          apiKey:
-            type: string
-            description: To be used as a dataset parameter value
-          apiVersionNumber:
-            type: string
-            description: To be used as a version parameter value
-          apiUrl:
-            type: string
-            format: uri
-            description: "The URL describing the dataset's fields"
-          apiDocumentationUrl:
-            type: string
-            format: uri
-            description: A URL to the API console for each API
+```json
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Pet",
+  "type": "object",
+  "required": ["name", "species"],
+  "properties": {
+    "name": {
+      "type": "string",
+      "description": "The pet's name"
+    },
+    "species": {
+      "type": "string",
+      "enum": ["dog", "cat", "bird", "fish"]
+    },
+    "age": {
+      "type": "integer",
+      "minimum": 0,
+      "description": "Age in years"
+    },
+    "vaccinated": {
+      "type": "boolean",
+      "default": false
+    }
+  }
+}
 ```
 
 </details>
 
 <details>
-<summary>model.py</summary>
+<summary>🐍 model.py (output)</summary>
 
 ```python
 # generated by datamodel-codegen:
-#   filename:  api.yaml
-#   timestamp: 2020-06-02T05:28:24+00:00
+#   filename:  schema.json
 
 from __future__ import annotations
 
-from typing import List, Optional
-
-from pydantic import AnyUrl, BaseModel, Field
+from enum import Enum
+from typing import Optional
 
+from pydantic import BaseModel, Field
 
-class Pet(BaseModel):
-    id: int
-    name: str
-    tag: Optional[str] = None
 
+class Species(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    bird = 'bird'
+    fish = 'fish'
 
-class Pets(BaseModel):
-    __root__: List[Pet]
 
+class Pet(BaseModel):
+    name: str = Field(..., description="The pet's name")
+    species: Species
+    age: Optional[int] = Field(None, description='Age in years', ge=0)
+    vaccinated: Optional[bool] = False
+```
 
-class Error(BaseModel):
-    code: int
-    message: str
+</details>
 
+---
 
-class Api(BaseModel):
-    apiKey: Optional[str] = Field(
-        None, description='To be used as a dataset parameter value'
-    )
-    apiVersionNumber: Optional[str] = Field(
-        None, description='To be used as a version parameter value'
-    )
-    apiUrl: Optional[AnyUrl] = Field(
-        None, description="The URL describing the dataset's fields"
-    )
-    apiDocumentationUrl: Optional[AnyUrl] = Field(
-        None, description='A URL to the API console for each API'
-    )
+## 📥 Supported Input
 
+- OpenAPI 3 (YAML/JSON)
+- JSON Schema
+- JSON / YAML / CSV data
+- GraphQL schema
+- Python dictionary
 
-class Apis(BaseModel):
-    __root__: List[Api]
-```
-</details>
+## 📤 Supported Output
 
-## Supported input types
--  OpenAPI 3 (YAML/JSON, [OpenAPI Data Type](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#data-types));
--  JSON Schema ([JSON Schema Core](http://json-schema.org/draft/2019-09/json-schema-validation.html)/[JSON Schema Validation](http://json-schema.org/draft/2019-09/json-schema-validation.html));
--  JSON/YAML/CSV Data (it will be converted to JSON Schema);
--  Python dictionary (it will be converted to JSON Schema);
--  GraphQL schema ([GraphQL Schemas and Types](https://graphql.org/learn/schema/));
-
-## Supported output types
-- [pydantic](https://docs.pydantic.dev/1.10/).BaseModel;
-- [pydantic_v2](https://docs.pydantic.dev/2.0/).BaseModel;
-- [dataclasses.dataclass](https://docs.python.org/3/library/dataclasses.html);
-- [typing.TypedDict](https://docs.python.org/3/library/typing.html#typing.TypedDict);
-- [msgspec.Struct](https://github.com/jcrist/msgspec);
-- Custom type from your [jinja2](https://jinja.palletsprojects.com/en/3.1.x/) template;
+- [pydantic v1](https://docs.pydantic.dev/1.10/) BaseModel
+- [pydantic v2](https://docs.pydantic.dev/) BaseModel
+- [dataclasses](https://docs.python.org/3/library/dataclasses.html)
+- [TypedDict](https://docs.python.org/3/library/typing.html#typing.TypedDict)
+- [msgspec](https://github.com/jcrist/msgspec) Struct
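+
+Switching targets is a one-flag change. For example, generating a `TypedDict` instead of a Pydantic model from the same hypothetical `schema.json` used above:
+
+```bash
+datamodel-codegen --input schema.json --input-file-type jsonschema \
+  --output-model-type typing.TypedDict --output model.py
+```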
 
-## Sponsors
-<table>
-  <tr>
-    <td valign="top" align="center">
-    <a href="https://github.com/JetBrainsOfficial">
-      <img src="https://avatars.githubusercontent.com/u/60931315?s=100&v=4" alt="JetBrains Logo" style="width: 100px;">
-      <p>JetBrains</p>
-    </a>
-    </td>
-  <td valign="top" align="center">
-    <a href="https://github.com/astral-sh">
-      <img src="https://avatars.githubusercontent.com/u/115962839?s=200&v=4" alt="Astral Logo" style="width: 100px;">
-      <p>Astral</p>
-    </a>
-  </td>
-  <td valign="top" align="center">
-    <a href="https://github.com/DataDog">
-      <img src="https://avatars.githubusercontent.com/u/365230?s=200&v=4" alt="Datadog, Inc. Logo" style="width: 100px;">
-      <p>Datadog, Inc.</p>
-    </a>
-  </td>
-  </tr>
-</table>
+---
 
-## Projects that use datamodel-code-generator
- 
-These OSS projects use datamodel-code-generator to generate many models. 
-See the following linked projects for real world examples and inspiration.
-
-- [airbytehq/airbyte](https://github.com/airbytehq/airbyte)
-  - *[Generate Python, Java/Kotlin, and Typescript protocol models](https://github.com/airbytehq/airbyte-protocol/tree/main/protocol-models/bin)*
-- [apache/iceberg](https://github.com/apache/iceberg)
-  - *[Generate Python code](https://github.com/apache/iceberg/blob/d2e1094ee0cc6239d43f63ba5114272f59d605d2/open-api/README.md?plain=1#L39)* 
-    *[`make generate`](https://github.com/apache/iceberg/blob/d2e1094ee0cc6239d43f63ba5114272f59d605d2/open-api/Makefile#L24-L34)*
-- [argoproj-labs/hera](https://github.com/argoproj-labs/hera)
-  - *[`Makefile`](https://github.com/argoproj-labs/hera/blob/c8cbf0c7a676de57469ca3d6aeacde7a5e84f8b7/Makefile#L53-L62)*
-- [awslabs/aws-lambda-powertools-python](https://github.com/awslabs/aws-lambda-powertools-python)
-  - *Recommended for [advanced-use-cases](https://awslabs.github.io/aws-lambda-powertools-python/2.6.0/utilities/parser/#advanced-use-cases) in the official documentation*
-- [DataDog/integrations-core](https://github.com/DataDog/integrations-core)
-  - *[Config models](https://github.com/DataDog/integrations-core/blob/master/docs/developer/meta/config-models.md)*
-- [hashintel/hash](https://github.com/hashintel/hash)
-  - *[`codegen.sh`](https://github.com/hashintel/hash/blob/9762b1a1937e14f6b387677e4c7fe4a5f3d4a1e1/libs/%40local/hash-graph-client/python/scripts/codegen.sh#L21-L39)*
-- [IBM/compliance-trestle](https://github.com/IBM/compliance-trestle)
-  - *[Building the models from the OSCAL schemas.](https://github.com/IBM/compliance-trestle/blob/develop/docs/contributing/website.md#building-the-models-from-the-oscal-schemas)*
-- [Netflix/consoleme](https://github.com/Netflix/consoleme)
-  - *[How do I generate models from the Swagger specification?](https://github.com/Netflix/consoleme/blob/master/docs/gitbook/faq.md#how-do-i-generate-models-from-the-swagger-specification)*
-- [Nike-Inc/brickflow](https://github.com/Nike-Inc/brickflow)
-  - *[Code generate tools](https://github.com/Nike-Inc/brickflow/blob/e3245bf638588867b831820a6675ada76b2010bf/tools/README.md?plain=1#L8)[`./tools/gen-bundle.sh`](https://github.com/Nike-Inc/brickflow/blob/e3245bf638588867b831820a6675ada76b2010bf/tools/gen-bundle.sh#L15-L22)*
-- [open-metadata/OpenMetadata](https://github.com/open-metadata/OpenMetadata)
-  - *[Makefile](https://github.com/open-metadata/OpenMetadata/blob/main/Makefile)*
-- [PostHog/posthog](https://github.com/PostHog/posthog)
-  - *[Generate models via `npm run`](https://github.com/PostHog/posthog/blob/e1a55b9cb38d01225224bebf8f0c1e28faa22399/package.json#L41)* 
-- [SeldonIO/MLServer](https://github.com/SeldonIO/MLServer)
-  - *[generate-types.sh](https://github.com/SeldonIO/MLServer/blob/master/hack/generate-types.sh)*
+## 🍳 Common Recipes
 
-## Installation
-
-To install `datamodel-code-generator`:
+### 🆕 Generate Pydantic v2 models
 ```bash
-$ pip install datamodel-code-generator
+datamodel-codegen --input schema.json --input-file-type jsonschema --output-model-type pydantic_v2.BaseModel --output model.py
 ```
 
-### `http` extra option
-If you want to resolve `$ref` for remote files then you should specify `http` extra option.
+### 🌐 Generate from URL
 ```bash
-$ pip install 'datamodel-code-generator[http]'
+pip install 'datamodel-code-generator[http]'
+datamodel-codegen --url https://example.com/api/openapi.yaml --input-file-type openapi --output-model-type pydantic_v2.BaseModel --output model.py
 ```
 
-### `graphql` extra option
-
-If you want to generate data model from a GraphQL schema then you should specify `graphql` extra option.
-```bash
-$ pip install 'datamodel-code-generator[graphql]'
+### ⚙️ Use with pyproject.toml
+```toml
+[tool.datamodel-codegen]
+input = "schema.yaml"
+input-file-type = "openapi"
+output = "src/models.py"
+output-model-type = "pydantic_v2.BaseModel"
 ```
 
-### Docker Image
-The docker image is in [Docker Hub](https://hub.docker.com/r/koxudaxi/datamodel-code-generator)
-```bash
-$ docker pull koxudaxi/datamodel-code-generator
-```
+See [pyproject.toml Configuration](https://koxudaxi.github.io/datamodel-code-generator/pyproject_toml/) for more options.
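+
+With that table in place, the bare command picks the settings up; a sketch, assuming the CLI is run from the directory containing `pyproject.toml`:
+
+```bash
+datamodel-codegen
+```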
 
-## Advanced Uses
-You can generate models from a URL.
+### 🔄 CI/CD Integration
 ```bash
-$ datamodel-codegen --url https://<INPUT FILE URL> --output model.py
+datamodel-codegen --check
 ```
-This method needs the [http extra option](#http-extra-option)
 
+Verify that generated code stays in sync with your schemas. See [CI/CD Integration](https://koxudaxi.github.io/datamodel-code-generator/ci-cd/) for GitHub Actions and pre-commit hooks.
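+
+A sketch of a CI invocation, assuming `--check` regenerates the models and exits non-zero when the committed `--output` file is out of date (the paths here are hypothetical):
+
+```bash
+datamodel-codegen --input schema.yaml --input-file-type openapi \
+  --output-model-type pydantic_v2.BaseModel --output src/models.py --check
+```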
 
-## All Command Options
+---
 
-The `datamodel-codegen` command:
+## 💖 Sponsors
 
-<!-- start command help -->
-```bash
-usage: 
-  datamodel-codegen [options]
+<table>
+  <tr>
+    <td valign="top" align="center">
+      <a href="https://github.com/astral-sh">
+        <img src="https://avatars.githubusercontent.com/u/115962839?s=200&v=4" alt="Astral Logo" style="width: 100px;">
+        <p>Astral</p>
+      </a>
+    </td>
+  </tr>
+</table>
+
+---
+
+## 🏢 Projects that use datamodel-code-generator
+
+These projects use datamodel-code-generator. See the linked examples for real-world usage.
+
+- [PostHog/posthog](https://github.com/PostHog/posthog) - *[Generate models via npm run](https://github.com/PostHog/posthog/blob/e1a55b9cb38d01225224bebf8f0c1e28faa22399/package.json#L41)*
+- [airbytehq/airbyte](https://github.com/airbytehq/airbyte) - *[Generate Python, Java/Kotlin, and Typescript protocol models](https://github.com/airbytehq/airbyte-protocol/tree/main/protocol-models/bin)*
+- [apache/iceberg](https://github.com/apache/iceberg) - *[Generate Python code](https://github.com/apache/iceberg/blob/d2e1094ee0cc6239d43f63ba5114272f59d605d2/open-api/README.md?plain=1#L39)*
+- [open-metadata/OpenMetadata](https://github.com/open-metadata/OpenMetadata) - *[datamodel_generation.py](https://github.com/open-metadata/OpenMetadata/blob/main/scripts/datamodel_generation.py)*
+- [awslabs/aws-lambda-powertools-python](https://github.com/awslabs/aws-lambda-powertools-python) - *[Recommended for advanced-use-cases](https://awslabs.github.io/aws-lambda-powertools-python/2.6.0/utilities/parser/#advanced-use-cases)*
+- [Netflix/consoleme](https://github.com/Netflix/consoleme) - *[Generate models from Swagger](https://github.com/Netflix/consoleme/blob/master/docs/gitbook/faq.md#how-do-i-generate-models-from-the-swagger-specification)*
+- [DataDog/integrations-core](https://github.com/DataDog/integrations-core) - *[Config models](https://github.com/DataDog/integrations-core/blob/master/docs/developer/meta/config-models.md)*
+- [argoproj-labs/hera](https://github.com/argoproj-labs/hera) - *[Makefile](https://github.com/argoproj-labs/hera/blob/c8cbf0c7a676de57469ca3d6aeacde7a5e84f8b7/Makefile#L53-L62)*
+- [SeldonIO/MLServer](https://github.com/SeldonIO/MLServer) - *[generate-types.sh](https://github.com/SeldonIO/MLServer/blob/master/hack/generate-types.sh)*
+- [geojupyter/jupytergis](https://github.com/geojupyter/jupytergis) - *[Python type generation from JSONSchema](https://jupytergis.readthedocs.io/en/latest/contributor_guide/explanation/code-generation.html)*
+- [Nike-Inc/brickflow](https://github.com/Nike-Inc/brickflow) - *[Code generate tools](https://github.com/Nike-Inc/brickflow/blob/e3245bf638588867b831820a6675ada76b2010bf/tools/README.md?plain=1#L8)*
+- [cloudcoil/cloudcoil](https://github.com/cloudcoil/cloudcoil) - *[Model generation](https://github.com/cloudcoil/cloudcoil#%EF%B8%8F-model-generation)*
+- [IBM/compliance-trestle](https://github.com/IBM/compliance-trestle) - *[Building models from OSCAL schemas](https://github.com/IBM/compliance-trestle/blob/develop/docs/contributing/website.md#building-the-models-from-the-oscal-schemas)*
+- [hashintel/hash](https://github.com/hashintel/hash) - *[codegen.sh](https://github.com/hashintel/hash/blob/9762b1a1937e14f6b387677e4c7fe4a5f3d4a1e1/libs/%40local/hash-graph-client/python/scripts/codegen.sh#L21-L39)*
+
+[See all dependents →](https://github.com/koxudaxi/datamodel-code-generator/network/dependents)
+
+---
 
-Generate Python data models from schema definitions or structured data
+## 🔗 Related Projects
 
-Options:
-  --additional-imports ADDITIONAL_IMPORTS
-                        Custom imports for output (delimited list input). For example
-                        "datetime.date,datetime.datetime"
-  --custom-formatters CUSTOM_FORMATTERS
-                        List of modules with custom formatter (delimited list input).
-  --http-headers HTTP_HEADER [HTTP_HEADER ...]
-                        Set headers in HTTP requests to the remote host. (example:
-                        "Authorization: Basic dXNlcjpwYXNz")
-  --http-ignore-tls     Disable verification of the remote host's TLS certificate
-  --http-query-parameters HTTP_QUERY_PARAMETERS [HTTP_QUERY_PARAMETERS ...]
-                        Set query parameters in HTTP requests to the remote host. (example:
-                        "ref=branch")
-  --input INPUT         Input file/directory (default: stdin)
-  --input-file-type {auto,openapi,jsonschema,json,yaml,dict,csv,graphql}
-                        Input file type (default: auto)
-  --output OUTPUT       Output file (default: stdout)
-  --output-model-type {pydantic.BaseModel,pydantic_v2.BaseModel,dataclasses.dataclass,typing.TypedDict,msgspec.Struct}
-                        Output model type (default: pydantic.BaseModel)
-  --url URL             Input file URL. `--input` is ignored when `--url` is used
-
-Typing customization:
-  --base-class BASE_CLASS
-                        Base Class (default: pydantic.BaseModel)
-  --enum-field-as-literal {all,one}
-                        Parse enum field as literal. all: all enum field type are Literal.
-                        one: field type is Literal when an enum has only one possible value
-  --field-constraints   Use field constraints and not con* annotations
-  --set-default-enum-member
-                        Set enum members as default values for enum field
-  --strict-types {str,bytes,int,float,bool} [{str,bytes,int,float,bool} ...]
-                        Use strict types
-  --use-annotated       Use typing.Annotated for Field(). Also, `--field-constraints` option
-                        will be enabled.
-  --use-generic-container-types
-                        Use generic container types for type hinting (typing.Sequence,
-                        typing.Mapping). If `--use-standard-collections` option is set, then
-                        import from collections.abc instead of typing
-  --use-non-positive-negative-number-constrained-types
-                        Use the Non{Positive,Negative}{FloatInt} types instead of the
-                        corresponding con* constrained types.
-  --use-one-literal-as-default
-                        Use one literal as default value for one literal field
-  --use-standard-collections
-                        Use standard collections for type hinting (list, dict)
-  --use-subclass-enum   Define Enum class as subclass with field type when enum has type
-                        (int, float, bytes, str)
-  --use-union-operator  Use | operator for Union type (PEP 604).
-  --use-unique-items-as-set
-                        define field type as `set` when the field attribute has
-                        `uniqueItems`
-
-Field customization:
-  --capitalise-enum-members, --capitalize-enum-members
-                        Capitalize field names on enum
-  --empty-enum-field-name EMPTY_ENUM_FIELD_NAME
-                        Set field name when enum value is empty (default: `_`)
-  --field-extra-keys FIELD_EXTRA_KEYS [FIELD_EXTRA_KEYS ...]
-                        Add extra keys to field parameters
-  --field-extra-keys-without-x-prefix FIELD_EXTRA_KEYS_WITHOUT_X_PREFIX [FIELD_EXTRA_KEYS_WITHOUT_X_PREFIX ...]
-                        Add extra keys with `x-` prefix to field parameters. The extra keys
-                        are stripped of the `x-` prefix.
-  --field-include-all-keys
-                        Add all keys to field parameters
-  --force-optional      Force optional for required fields
-  --no-alias            Do not add a field alias. E.g., if --snake-case-field is used along
-                        with a base class, which has an alias_generator
-  --original-field-name-delimiter ORIGINAL_FIELD_NAME_DELIMITER
-                        Set delimiter to convert to snake case. This option only can be used
-                        with --snake-case-field (default: `_` )
-  --remove-special-field-name-prefix
-                        Remove field name prefix if it has a special meaning e.g.
-                        underscores
-  --snake-case-field    Change camel-case field name to snake-case
-  --special-field-name-prefix SPECIAL_FIELD_NAME_PREFIX
-                        Set field name prefix when first character can't be used as Python
-                        field name (default: `field`)
-  --strip-default-none  Strip default None on fields
-  --union-mode {smart,left_to_right}
-                        Union mode for only pydantic v2 field
-  --use-default         Use default value even if a field is required
-  --use-default-kwarg   Use `default=` instead of a positional argument for Fields that have
-                        default values.
-  --use-field-description
-                        Use schema description to populate field docstring
-
-Model customization:
-  --allow-extra-fields  Allow to pass extra fields, if this flag is not passed, extra fields
-                        are forbidden.
-  --allow-population-by-field-name
-                        Allow population by field name
-  --class-name CLASS_NAME
-                        Set class name of root model
-  --collapse-root-models
-                        Models generated with a root-type field will be merged into the
-                        models using that root-type model
-  --disable-appending-item-suffix
-                        Disable appending `Item` suffix to model name in an array
-  --disable-timestamp   Disable timestamp on file headers
-  --enable-faux-immutability
-                        Enable faux immutability
-  --enable-version-header
-                        Enable package version on file headers
-  --keep-model-order    Keep generated models' order
-  --keyword-only        Defined models as keyword only (for example
-                        dataclass(kw_only=True)).
-  --output-datetime-class {datetime,AwareDatetime,NaiveDatetime}
-                        Choose Datetime class between AwareDatetime, NaiveDatetime or
-                        datetime. Each output model has its default mapping (for example
-                        pydantic: datetime, dataclass: str, ...)
-  --reuse-model         Reuse models on the field when a module has the model with the same
-                        content
-  --target-python-version {3.6,3.7,3.8,3.9,3.10,3.11,3.12}
-                        target python version (default: 3.8)
-  --treat-dot-as-module
-                        treat dotted module names as modules
-  --use-exact-imports   import exact types instead of modules, for example: "from .foo
-                        import Bar" instead of "from . import foo" with "foo.Bar"
-  --use-pendulum        use pendulum instead of datetime
-  --use-schema-description
-                        Use schema description to populate class docstring
-  --use-title-as-name   use titles as class names of models
-
-Template customization:
-  --aliases ALIASES     Alias mapping file
-  --custom-file-header CUSTOM_FILE_HEADER
-                        Custom file header
-  --custom-file-header-path CUSTOM_FILE_HEADER_PATH
-                        Custom file header file path
-  --custom-formatters-kwargs CUSTOM_FORMATTERS_KWARGS
-                        A file with kwargs for custom formatters.
-  --custom-template-dir CUSTOM_TEMPLATE_DIR
-                        Custom template directory
-  --encoding ENCODING   The encoding of input and output (default: utf-8)
-  --extra-template-data EXTRA_TEMPLATE_DATA
-                        Extra template data
-  --use-double-quotes   Model generated with double quotes. Single quotes or your black
-                        config skip_string_normalization value will be used without this
-                        option.
-  --wrap-string-literal
-                        Wrap string literal by using black `experimental-string-processing`
-                        option (require black 20.8b0 or later)
-
-OpenAPI-only options:
-  --openapi-scopes {schemas,paths,tags,parameters} [{schemas,paths,tags,parameters} ...]
-                        Scopes of OpenAPI model generation (default: schemas)
-  --strict-nullable     Treat default field as a non-nullable field (Only OpenAPI)
-  --use-operation-id-as-name
-                        use operation id of OpenAPI as class names of models
-  --validation          Deprecated: Enable validation (Only OpenAPI). this option is
-                        deprecated. it will be removed in future releases
-
-General options:
-  --debug               show debug message (require "debug". `$ pip install 'datamodel-code-
-                        generator[debug]'`)
-  --disable-warnings    disable warnings
-  --no-color            disable colorized output
-  --version             show version
-  -h, --help            show this help message and exit
-```
-<!-- end command help -->
-
-## Related projects
-### fastapi-code-generator
-This code generator creates [FastAPI](https://github.com/tiangolo/fastapi) app from an openapi file.
-
-[https://github.com/koxudaxi/fastapi-code-generator](https://github.com/koxudaxi/fastapi-code-generator)
-
-### pydantic-pycharm-plugin
-[A JetBrains PyCharm plugin](https://plugins.jetbrains.com/plugin/12861-pydantic) for [`pydantic`](https://github.com/samuelcolvin/pydantic).
-
-[https://github.com/koxudaxi/pydantic-pycharm-plugin](https://github.com/koxudaxi/pydantic-pycharm-plugin)
-
-## PyPi
-
-[https://pypi.org/project/datamodel-code-generator](https://pypi.org/project/datamodel-code-generator)
+- **[fastapi-code-generator](https://github.com/koxudaxi/fastapi-code-generator)** - Generate FastAPI app from OpenAPI
+- **[pydantic-pycharm-plugin](https://github.com/koxudaxi/pydantic-pycharm-plugin)** - PyCharm plugin for Pydantic
 
-## Contributing
+---
 
-See `docs/development-contributing.md` for how to get started!
+## 🤝 Contributing
 
-## License
+See [Development & Contributing](https://koxudaxi.github.io/datamodel-code-generator/development-contributing/) for how to get started!
 
-datamodel-code-generator is released under the MIT License. http://www.opensource.org/licenses/mit-license
+## 📄 License
 
+MIT License - see [LICENSE](LICENSE) for details.
diff -pruN 0.26.4-3/README.md 0.45.0-1/README.md
--- 0.26.4-3/README.md	2024-12-15 17:25:57.703037000 +0000
+++ 0.45.0-1/README.md	2025-12-19 19:37:31.000000000 +0000
@@ -1,7 +1,6 @@
 # datamodel-code-generator
 
-This code generator creates [pydantic v1 and v2](https://docs.pydantic.dev/) model, [dataclasses.dataclass](https://docs.python.org/3/library/dataclasses.html), [typing.TypedDict](https://docs.python.org/3/library/typing.html#typing.TypedDict) 
-and [msgspec.Struct](https://github.com/jcrist/msgspec) from an openapi file and others.
+🚀 Generate Python data models from schema definitions in seconds.
 
 [![PyPI version](https://badge.fury.io/py/datamodel-code-generator.svg)](https://pypi.python.org/pypi/datamodel-code-generator)
 [![Conda-forge](https://img.shields.io/conda/v/conda-forge/datamodel-code-generator)](https://anaconda.org/conda-forge/datamodel-code-generator)
@@ -9,526 +8,240 @@ and [msgspec.Struct](https://github.com/
 [![PyPI - Python Version](https://img.shields.io/pypi/pyversions/datamodel-code-generator)](https://pypi.python.org/pypi/datamodel-code-generator)
 [![codecov](https://codecov.io/gh/koxudaxi/datamodel-code-generator/graph/badge.svg?token=plzSSFb9Li)](https://codecov.io/gh/koxudaxi/datamodel-code-generator)
 ![license](https://img.shields.io/github/license/koxudaxi/datamodel-code-generator.svg)
-[![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff)
 [![Pydantic v1](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/pydantic/pydantic/main/docs/badge/v1.json)](https://pydantic.dev)
 [![Pydantic v2](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/pydantic/pydantic/main/docs/badge/v2.json)](https://pydantic.dev)
 
-## Help
-See [documentation](https://koxudaxi.github.io/datamodel-code-generator) for more details.
+## ✨ What it does
 
-## Quick Installation
+- 📄 Converts **OpenAPI 3**, **JSON Schema**, **GraphQL**, and raw data (JSON/YAML/CSV) into Python models
+- 🎯 Generates **Pydantic v1/v2**, **dataclasses**, **TypedDict**, or **msgspec** output
+- 🔗 Handles complex schemas: `$ref`, `allOf`, `oneOf`, `anyOf`, enums, and nested types
+- ✅ Produces type-safe, validated code ready for your IDE and type checker
 
-To install `datamodel-code-generator`:
+---
+
+## 📖 Documentation
+
+**👉 [koxudaxi.github.io/datamodel-code-generator](https://koxudaxi.github.io/datamodel-code-generator)**
+
+- 🖥️ [CLI Reference](https://koxudaxi.github.io/datamodel-code-generator/cli-reference/) - All command-line options
+- ⚙️ [pyproject.toml](https://koxudaxi.github.io/datamodel-code-generator/pyproject_toml/) - Configuration file
+- 🔄 [CI/CD Integration](https://koxudaxi.github.io/datamodel-code-generator/ci-cd/) - GitHub Actions, pre-commit hooks
+- 🚀 [One-liner Usage](https://koxudaxi.github.io/datamodel-code-generator/oneliner/) - uvx, pipx, clipboard integration
+- ❓ [FAQ](https://koxudaxi.github.io/datamodel-code-generator/faq/) - Common questions
+
+---
+
+## 📦 Installation
+
+```bash
+pip install datamodel-code-generator
+```
+
+<details>
+<summary>Other installation methods</summary>
+
+**uv:**
+```bash
+uv add datamodel-code-generator
+```
+
+**conda:**
+```bash
+conda install -c conda-forge datamodel-code-generator
+```
+
+**With HTTP support** (for resolving remote `$ref`):
+```bash
+pip install 'datamodel-code-generator[http]'
+```
+
+**With GraphQL support:**
 ```bash
-$ pip install datamodel-code-generator
+pip install 'datamodel-code-generator[graphql]'
 ```
 
-## Simple Usage
-You can generate models from a local file.
+**Docker:**
 ```bash
-$ datamodel-codegen --input api.yaml --output model.py
+docker pull koxudaxi/datamodel-code-generator
+```
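+
+A minimal sketch of running the image, assuming its entrypoint is `datamodel-codegen` and that you mount the host directory so the container can read the input and write the output:
+
+```bash
+docker run --rm -v "$PWD:/work" koxudaxi/datamodel-code-generator \
+  --input /work/schema.json --input-file-type jsonschema --output /work/model.py
+```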
+
+</details>
+
+---
+
+## 🏃 Quick Start
+
+```bash
+datamodel-codegen --input schema.json --input-file-type jsonschema --output-model-type pydantic_v2.BaseModel --output model.py
 ```
 
 <details>
-<summary>api.yaml</summary>
+<summary>📄 schema.json (input)</summary>
 
-```yaml
-openapi: "3.0.0"
-info:
-  version: 1.0.0
-  title: Swagger Petstore
-  license:
-    name: MIT
-servers:
-  - url: http://petstore.swagger.io/v1
-paths:
-  /pets:
-    get:
-      summary: List all pets
-      operationId: listPets
-      tags:
-        - pets
-      parameters:
-        - name: limit
-          in: query
-          description: How many items to return at one time (max 100)
-          required: false
-          schema:
-            type: integer
-            format: int32
-      responses:
-        '200':
-          description: A paged array of pets
-          headers:
-            x-next:
-              description: A link to the next page of responses
-              schema:
-                type: string
-          content:
-            application/json:
-              schema:
-                $ref: "#/components/schemas/Pets"
-        default:
-          description: unexpected error
-          content:
-            application/json:
-              schema:
-                $ref: "#/components/schemas/Error"
-                x-amazon-apigateway-integration:
-                  uri:
-                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
-                  passthroughBehavior: when_no_templates
-                  httpMethod: POST
-                  type: aws_proxy
-    post:
-      summary: Create a pet
-      operationId: createPets
-      tags:
-        - pets
-      responses:
-        '201':
-          description: Null response
-        default:
-          description: unexpected error
-          content:
-            application/json:
-              schema:
-                $ref: "#/components/schemas/Error"
-                x-amazon-apigateway-integration:
-                  uri:
-                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
-                  passthroughBehavior: when_no_templates
-                  httpMethod: POST
-                  type: aws_proxy
-  /pets/{petId}:
-    get:
-      summary: Info for a specific pet
-      operationId: showPetById
-      tags:
-        - pets
-      parameters:
-        - name: petId
-          in: path
-          required: true
-          description: The id of the pet to retrieve
-          schema:
-            type: string
-      responses:
-        '200':
-          description: Expected response to a valid request
-          content:
-            application/json:
-              schema:
-                $ref: "#/components/schemas/Pets"
-        default:
-          description: unexpected error
-          content:
-            application/json:
-              schema:
-                $ref: "#/components/schemas/Error"
-    x-amazon-apigateway-integration:
-      uri:
-        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
-      passthroughBehavior: when_no_templates
-      httpMethod: POST
-      type: aws_proxy
-components:
-  schemas:
-    Pet:
-      required:
-        - id
-        - name
-      properties:
-        id:
-          type: integer
-          format: int64
-        name:
-          type: string
-        tag:
-          type: string
-    Pets:
-      type: array
-      items:
-        $ref: "#/components/schemas/Pet"
-    Error:
-      required:
-        - code
-        - message
-      properties:
-        code:
-          type: integer
-          format: int32
-        message:
-          type: string
-    apis:
-      type: array
-      items:
-        type: object
-        properties:
-          apiKey:
-            type: string
-            description: To be used as a dataset parameter value
-          apiVersionNumber:
-            type: string
-            description: To be used as a version parameter value
-          apiUrl:
-            type: string
-            format: uri
-            description: "The URL describing the dataset's fields"
-          apiDocumentationUrl:
-            type: string
-            format: uri
-            description: A URL to the API console for each API
+```json
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Pet",
+  "type": "object",
+  "required": ["name", "species"],
+  "properties": {
+    "name": {
+      "type": "string",
+      "description": "The pet's name"
+    },
+    "species": {
+      "type": "string",
+      "enum": ["dog", "cat", "bird", "fish"]
+    },
+    "age": {
+      "type": "integer",
+      "minimum": 0,
+      "description": "Age in years"
+    },
+    "vaccinated": {
+      "type": "boolean",
+      "default": false
+    }
+  }
+}
 ```
 
 </details>
 
 <details>
-<summary>model.py</summary>
+<summary>🐍 model.py (output)</summary>
 
 ```python
 # generated by datamodel-codegen:
-#   filename:  api.yaml
-#   timestamp: 2020-06-02T05:28:24+00:00
+#   filename:  schema.json
 
 from __future__ import annotations
 
-from typing import List, Optional
+from enum import Enum
+from typing import Optional
 
-from pydantic import AnyUrl, BaseModel, Field
-
-
-class Pet(BaseModel):
-    id: int
-    name: str
-    tag: Optional[str] = None
+from pydantic import BaseModel, Field
 
 
-class Pets(BaseModel):
-    __root__: List[Pet]
+class Species(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    bird = 'bird'
+    fish = 'fish'
 
 
-class Error(BaseModel):
-    code: int
-    message: str
+class Pet(BaseModel):
+    name: str = Field(..., description="The pet's name")
+    species: Species
+    age: Optional[int] = Field(None, description='Age in years', ge=0)
+    vaccinated: Optional[bool] = False
+```
 
+</details>
 
-class Api(BaseModel):
-    apiKey: Optional[str] = Field(
-        None, description='To be used as a dataset parameter value'
-    )
-    apiVersionNumber: Optional[str] = Field(
-        None, description='To be used as a version parameter value'
-    )
-    apiUrl: Optional[AnyUrl] = Field(
-        None, description="The URL describing the dataset's fields"
-    )
-    apiDocumentationUrl: Optional[AnyUrl] = Field(
-        None, description='A URL to the API console for each API'
-    )
+---
 
+## 📥 Supported Input
 
-class Apis(BaseModel):
-    __root__: List[Api]
-```
-</details>
+- OpenAPI 3 (YAML/JSON)
+- JSON Schema
+- JSON / YAML / CSV data
+- GraphQL schema
+- Python dictionary
 
-## Supported input types
--  OpenAPI 3 (YAML/JSON, [OpenAPI Data Type](https://github.com/OAI/OpenAPI-Specification/blob/main/versions/3.0.2.md#data-types));
--  JSON Schema ([JSON Schema Core](http://json-schema.org/draft/2019-09/json-schema-validation.html)/[JSON Schema Validation](http://json-schema.org/draft/2019-09/json-schema-validation.html));
--  JSON/YAML/CSV Data (it will be converted to JSON Schema);
--  Python dictionary (it will be converted to JSON Schema);
--  GraphQL schema ([GraphQL Schemas and Types](https://graphql.org/learn/schema/));
-
-## Supported output types
-- [pydantic](https://docs.pydantic.dev/1.10/).BaseModel;
-- [pydantic_v2](https://docs.pydantic.dev/2.0/).BaseModel;
-- [dataclasses.dataclass](https://docs.python.org/3/library/dataclasses.html);
-- [typing.TypedDict](https://docs.python.org/3/library/typing.html#typing.TypedDict);
-- [msgspec.Struct](https://github.com/jcrist/msgspec);
-- Custom type from your [jinja2](https://jinja.palletsprojects.com/en/3.1.x/) template;
+## 📤 Supported Output
 
-## Sponsors
-<table>
-  <tr>
-    <td valign="top" align="center">
-    <a href="https://github.com/JetBrainsOfficial">
-      <img src="https://avatars.githubusercontent.com/u/60931315?s=100&v=4" alt="JetBrains Logo" style="width: 100px;">
-      <p>JetBrains</p>
-    </a>
-    </td>
-  <td valign="top" align="center">
-    <a href="https://github.com/astral-sh">
-      <img src="https://avatars.githubusercontent.com/u/115962839?s=200&v=4" alt="Astral Logo" style="width: 100px;">
-      <p>Astral</p>
-    </a>
-  </td>
-  <td valign="top" align="center">
-    <a href="https://github.com/DataDog">
-      <img src="https://avatars.githubusercontent.com/u/365230?s=200&v=4" alt="Datadog, Inc. Logo" style="width: 100px;">
-      <p>Datadog, Inc.</p>
-    </a>
-  </td>
-  </tr>
-</table>
+- [pydantic v1](https://docs.pydantic.dev/1.10/) BaseModel
+- [pydantic v2](https://docs.pydantic.dev/) BaseModel
+- [dataclasses](https://docs.python.org/3/library/dataclasses.html)
+- [TypedDict](https://docs.python.org/3/library/typing.html#typing.TypedDict)
+- [msgspec](https://github.com/jcrist/msgspec) Struct
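+
+Switching targets is a one-flag change. For example, generating a `TypedDict` instead of a Pydantic model from the same hypothetical `schema.json` used above:
+
+```bash
+datamodel-codegen --input schema.json --input-file-type jsonschema \
+  --output-model-type typing.TypedDict --output model.py
+```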
 
-## Projects that use datamodel-code-generator
- 
-These OSS projects use datamodel-code-generator to generate many models. 
-See the following linked projects for real world examples and inspiration.
-
-- [airbytehq/airbyte](https://github.com/airbytehq/airbyte)
-  - *[Generate Python, Java/Kotlin, and Typescript protocol models](https://github.com/airbytehq/airbyte-protocol/tree/main/protocol-models/bin)*
-- [apache/iceberg](https://github.com/apache/iceberg)
-  - *[Generate Python code](https://github.com/apache/iceberg/blob/d2e1094ee0cc6239d43f63ba5114272f59d605d2/open-api/README.md?plain=1#L39)* 
-    *[`make generate`](https://github.com/apache/iceberg/blob/d2e1094ee0cc6239d43f63ba5114272f59d605d2/open-api/Makefile#L24-L34)*
-- [argoproj-labs/hera](https://github.com/argoproj-labs/hera)
-  - *[`Makefile`](https://github.com/argoproj-labs/hera/blob/c8cbf0c7a676de57469ca3d6aeacde7a5e84f8b7/Makefile#L53-L62)*
-- [awslabs/aws-lambda-powertools-python](https://github.com/awslabs/aws-lambda-powertools-python)
-  - *Recommended for [advanced-use-cases](https://awslabs.github.io/aws-lambda-powertools-python/2.6.0/utilities/parser/#advanced-use-cases) in the official documentation*
-- [DataDog/integrations-core](https://github.com/DataDog/integrations-core)
-  - *[Config models](https://github.com/DataDog/integrations-core/blob/master/docs/developer/meta/config-models.md)*
-- [hashintel/hash](https://github.com/hashintel/hash)
-  - *[`codegen.sh`](https://github.com/hashintel/hash/blob/9762b1a1937e14f6b387677e4c7fe4a5f3d4a1e1/libs/%40local/hash-graph-client/python/scripts/codegen.sh#L21-L39)*
-- [IBM/compliance-trestle](https://github.com/IBM/compliance-trestle)
-  - *[Building the models from the OSCAL schemas.](https://github.com/IBM/compliance-trestle/blob/develop/docs/contributing/website.md#building-the-models-from-the-oscal-schemas)*
-- [Netflix/consoleme](https://github.com/Netflix/consoleme)
-  - *[How do I generate models from the Swagger specification?](https://github.com/Netflix/consoleme/blob/master/docs/gitbook/faq.md#how-do-i-generate-models-from-the-swagger-specification)*
-- [Nike-Inc/brickflow](https://github.com/Nike-Inc/brickflow)
-  - *[Code generate tools](https://github.com/Nike-Inc/brickflow/blob/e3245bf638588867b831820a6675ada76b2010bf/tools/README.md?plain=1#L8)[`./tools/gen-bundle.sh`](https://github.com/Nike-Inc/brickflow/blob/e3245bf638588867b831820a6675ada76b2010bf/tools/gen-bundle.sh#L15-L22)*
-- [open-metadata/OpenMetadata](https://github.com/open-metadata/OpenMetadata)
-  - *[Makefile](https://github.com/open-metadata/OpenMetadata/blob/main/Makefile)*
-- [PostHog/posthog](https://github.com/PostHog/posthog)
-  - *[Generate models via `npm run`](https://github.com/PostHog/posthog/blob/e1a55b9cb38d01225224bebf8f0c1e28faa22399/package.json#L41)* 
-- [SeldonIO/MLServer](https://github.com/SeldonIO/MLServer)
-  - *[generate-types.sh](https://github.com/SeldonIO/MLServer/blob/master/hack/generate-types.sh)*
+---
 
-## Installation
+## 🍳 Common Recipes
 
-To install `datamodel-code-generator`:
+### 🆕 Generate Pydantic v2 models
 ```bash
-$ pip install datamodel-code-generator
+datamodel-codegen --input schema.json --input-file-type jsonschema --output-model-type pydantic_v2.BaseModel --output model.py
 ```
 
-### `http` extra option
-If you want to resolve `$ref` for remote files then you should specify `http` extra option.
+### 🌐 Generate from URL
 ```bash
-$ pip install 'datamodel-code-generator[http]'
+pip install 'datamodel-code-generator[http]'
+datamodel-codegen --url https://example.com/api/openapi.yaml --input-file-type openapi --output-model-type pydantic_v2.BaseModel --output model.py
 ```
 
-### `graphql` extra option
-
-If you want to generate data model from a GraphQL schema then you should specify `graphql` extra option.
-```bash
-$ pip install 'datamodel-code-generator[graphql]'
+### ⚙️ Use with pyproject.toml
+```toml
+[tool.datamodel-codegen]
+input = "schema.yaml"
+input-file-type = "openapi"
+output = "src/models.py"
+output-model-type = "pydantic_v2.BaseModel"
 ```
 
-### Docker Image
-The docker image is in [Docker Hub](https://hub.docker.com/r/koxudaxi/datamodel-code-generator)
-```bash
-$ docker pull koxudaxi/datamodel-code-generator
-```
+See [pyproject.toml Configuration](https://koxudaxi.github.io/datamodel-code-generator/pyproject_toml/) for more options.
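+
+With that table in place, the bare command picks the settings up; a sketch, assuming the CLI is run from the directory containing `pyproject.toml`:
+
+```bash
+datamodel-codegen
+```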
 
-## Advanced Uses
-You can generate models from a URL.
+### 🔄 CI/CD Integration
 ```bash
-$ datamodel-codegen --url https://<INPUT FILE URL> --output model.py
+datamodel-codegen --check
 ```
-This method needs the [http extra option](#http-extra-option)
 
+Verify that generated code stays in sync with your schemas. See [CI/CD Integration](https://koxudaxi.github.io/datamodel-code-generator/ci-cd/) for GitHub Actions and pre-commit hooks.
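+
+A sketch of a CI invocation, assuming `--check` regenerates the models and exits non-zero when the committed `--output` file is out of date (the paths here are hypothetical):
+
+```bash
+datamodel-codegen --input schema.yaml --input-file-type openapi \
+  --output-model-type pydantic_v2.BaseModel --output src/models.py --check
+```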
 
-## All Command Options
+---
 
-The `datamodel-codegen` command:
+## 💖 Sponsors
 
-<!-- start command help -->
-```bash
-usage: 
-  datamodel-codegen [options]
+<table>
+  <tr>
+    <td valign="top" align="center">
+      <a href="https://github.com/astral-sh">
+        <img src="https://avatars.githubusercontent.com/u/115962839?s=200&v=4" alt="Astral Logo" style="width: 100px;">
+        <p>Astral</p>
+      </a>
+    </td>
+  </tr>
+</table>
+
+---
+
+## 🏢 Projects that use datamodel-code-generator
+
+These projects use datamodel-code-generator. See the linked examples for real-world usage.
+
+- [PostHog/posthog](https://github.com/PostHog/posthog) - *[Generate models via npm run](https://github.com/PostHog/posthog/blob/e1a55b9cb38d01225224bebf8f0c1e28faa22399/package.json#L41)*
+- [airbytehq/airbyte](https://github.com/airbytehq/airbyte) - *[Generate Python, Java/Kotlin, and Typescript protocol models](https://github.com/airbytehq/airbyte-protocol/tree/main/protocol-models/bin)*
+- [apache/iceberg](https://github.com/apache/iceberg) - *[Generate Python code](https://github.com/apache/iceberg/blob/d2e1094ee0cc6239d43f63ba5114272f59d605d2/open-api/README.md?plain=1#L39)*
+- [open-metadata/OpenMetadata](https://github.com/open-metadata/OpenMetadata) - *[datamodel_generation.py](https://github.com/open-metadata/OpenMetadata/blob/main/scripts/datamodel_generation.py)*
+- [awslabs/aws-lambda-powertools-python](https://github.com/awslabs/aws-lambda-powertools-python) - *[Recommended for advanced-use-cases](https://awslabs.github.io/aws-lambda-powertools-python/2.6.0/utilities/parser/#advanced-use-cases)*
+- [Netflix/consoleme](https://github.com/Netflix/consoleme) - *[Generate models from Swagger](https://github.com/Netflix/consoleme/blob/master/docs/gitbook/faq.md#how-do-i-generate-models-from-the-swagger-specification)*
+- [DataDog/integrations-core](https://github.com/DataDog/integrations-core) - *[Config models](https://github.com/DataDog/integrations-core/blob/master/docs/developer/meta/config-models.md)*
+- [argoproj-labs/hera](https://github.com/argoproj-labs/hera) - *[Makefile](https://github.com/argoproj-labs/hera/blob/c8cbf0c7a676de57469ca3d6aeacde7a5e84f8b7/Makefile#L53-L62)*
+- [SeldonIO/MLServer](https://github.com/SeldonIO/MLServer) - *[generate-types.sh](https://github.com/SeldonIO/MLServer/blob/master/hack/generate-types.sh)*
+- [geojupyter/jupytergis](https://github.com/geojupyter/jupytergis) - *[Python type generation from JSONSchema](https://jupytergis.readthedocs.io/en/latest/contributor_guide/explanation/code-generation.html)*
+- [Nike-Inc/brickflow](https://github.com/Nike-Inc/brickflow) - *[Code generate tools](https://github.com/Nike-Inc/brickflow/blob/e3245bf638588867b831820a6675ada76b2010bf/tools/README.md?plain=1#L8)*
+- [cloudcoil/cloudcoil](https://github.com/cloudcoil/cloudcoil) - *[Model generation](https://github.com/cloudcoil/cloudcoil#%EF%B8%8F-model-generation)*
+- [IBM/compliance-trestle](https://github.com/IBM/compliance-trestle) - *[Building models from OSCAL schemas](https://github.com/IBM/compliance-trestle/blob/develop/docs/contributing/website.md#building-the-models-from-the-oscal-schemas)*
+- [hashintel/hash](https://github.com/hashintel/hash) - *[codegen.sh](https://github.com/hashintel/hash/blob/9762b1a1937e14f6b387677e4c7fe4a5f3d4a1e1/libs/%40local/hash-graph-client/python/scripts/codegen.sh#L21-L39)*
+
+[See all dependents →](https://github.com/koxudaxi/datamodel-code-generator/network/dependents)
+
+---
+
+## 🔗 Related Projects
 
-Generate Python data models from schema definitions or structured data
+- **[fastapi-code-generator](https://github.com/koxudaxi/fastapi-code-generator)** - Generate FastAPI app from OpenAPI
+- **[pydantic-pycharm-plugin](https://github.com/koxudaxi/pydantic-pycharm-plugin)** - PyCharm plugin for Pydantic
 
-Options:
-  --additional-imports ADDITIONAL_IMPORTS
-                        Custom imports for output (delimited list input). For example
-                        "datetime.date,datetime.datetime"
-  --custom-formatters CUSTOM_FORMATTERS
-                        List of modules with custom formatter (delimited list input).
-  --http-headers HTTP_HEADER [HTTP_HEADER ...]
-                        Set headers in HTTP requests to the remote host. (example:
-                        "Authorization: Basic dXNlcjpwYXNz")
-  --http-ignore-tls     Disable verification of the remote host's TLS certificate
-  --http-query-parameters HTTP_QUERY_PARAMETERS [HTTP_QUERY_PARAMETERS ...]
-                        Set query parameters in HTTP requests to the remote host. (example:
-                        "ref=branch")
-  --input INPUT         Input file/directory (default: stdin)
-  --input-file-type {auto,openapi,jsonschema,json,yaml,dict,csv,graphql}
-                        Input file type (default: auto)
-  --output OUTPUT       Output file (default: stdout)
-  --output-model-type {pydantic.BaseModel,pydantic_v2.BaseModel,dataclasses.dataclass,typing.TypedDict,msgspec.Struct}
-                        Output model type (default: pydantic.BaseModel)
-  --url URL             Input file URL. `--input` is ignored when `--url` is used
-
-Typing customization:
-  --base-class BASE_CLASS
-                        Base Class (default: pydantic.BaseModel)
-  --enum-field-as-literal {all,one}
-                        Parse enum field as literal. all: all enum field types are Literal.
-                        one: field type is Literal when an enum has only one possible value
-  --field-constraints   Use field constraints and not con* annotations
-  --set-default-enum-member
-                        Set enum members as default values for enum field
-  --strict-types {str,bytes,int,float,bool} [{str,bytes,int,float,bool} ...]
-                        Use strict types
-  --use-annotated       Use typing.Annotated for Field(). Also, `--field-constraints` option
-                        will be enabled.
-  --use-generic-container-types
-                        Use generic container types for type hinting (typing.Sequence,
-                        typing.Mapping). If `--use-standard-collections` option is set, then
-                        import from collections.abc instead of typing
-  --use-non-positive-negative-number-constrained-types
-                        Use the Non{Positive,Negative}{FloatInt} types instead of the
-                        corresponding con* constrained types.
-  --use-one-literal-as-default
-                        Use one literal as default value for one literal field
-  --use-standard-collections
-                        Use standard collections for type hinting (list, dict)
-  --use-subclass-enum   Define Enum class as subclass with field type when enum has type
-                        (int, float, bytes, str)
-  --use-union-operator  Use | operator for Union type (PEP 604).
-  --use-unique-items-as-set
-                        define field type as `set` when the field attribute has
-                        `uniqueItems`
-
-Field customization:
-  --capitalise-enum-members, --capitalize-enum-members
-                        Capitalize field names on enum
-  --empty-enum-field-name EMPTY_ENUM_FIELD_NAME
-                        Set field name when enum value is empty (default: `_`)
-  --field-extra-keys FIELD_EXTRA_KEYS [FIELD_EXTRA_KEYS ...]
-                        Add extra keys to field parameters
-  --field-extra-keys-without-x-prefix FIELD_EXTRA_KEYS_WITHOUT_X_PREFIX [FIELD_EXTRA_KEYS_WITHOUT_X_PREFIX ...]
-                        Add extra keys with `x-` prefix to field parameters. The extra keys
-                        are stripped of the `x-` prefix.
-  --field-include-all-keys
-                        Add all keys to field parameters
-  --force-optional      Force optional for required fields
-  --no-alias            Do not add a field alias. E.g., if --snake-case-field is used along
-                        with a base class that has an alias_generator
-  --original-field-name-delimiter ORIGINAL_FIELD_NAME_DELIMITER
-                        Set delimiter to convert to snake case. This option only can be used
-                        with --snake-case-field (default: `_` )
-  --remove-special-field-name-prefix
-                        Remove field name prefix if it has a special meaning e.g.
-                        underscores
-  --snake-case-field    Change camel-case field name to snake-case
-  --special-field-name-prefix SPECIAL_FIELD_NAME_PREFIX
-                        Set field name prefix when first character can't be used as Python
-                        field name (default: `field`)
-  --strip-default-none  Strip default None on fields
-  --union-mode {smart,left_to_right}
-                        Union mode for only pydantic v2 field
-  --use-default         Use default value even if a field is required
-  --use-default-kwarg   Use `default=` instead of a positional argument for Fields that have
-                        default values.
-  --use-field-description
-                        Use schema description to populate field docstring
-
-Model customization:
-  --allow-extra-fields  Allow passing extra fields; if this flag is not set, extra
-                        fields are forbidden.
-  --allow-population-by-field-name
-                        Allow population by field name
-  --class-name CLASS_NAME
-                        Set class name of root model
-  --collapse-root-models
-                        Models generated with a root-type field will be merged into the
-                        models using that root-type model
-  --disable-appending-item-suffix
-                        Disable appending `Item` suffix to model name in an array
-  --disable-timestamp   Disable timestamp on file headers
-  --enable-faux-immutability
-                        Enable faux immutability
-  --enable-version-header
-                        Enable package version on file headers
-  --keep-model-order    Keep generated models' order
-  --keyword-only        Define models as keyword only (for example
-                        dataclass(kw_only=True)).
-  --output-datetime-class {datetime,AwareDatetime,NaiveDatetime}
-                        Choose Datetime class between AwareDatetime, NaiveDatetime or
-                        datetime. Each output model has its default mapping (for example
-                        pydantic: datetime, dataclass: str, ...)
-  --reuse-model         Reuse models on the field when a module has the model with the same
-                        content
-  --target-python-version {3.6,3.7,3.8,3.9,3.10,3.11,3.12}
-                        target python version (default: 3.8)
-  --treat-dot-as-module
-                        treat dotted module names as modules
-  --use-exact-imports   import exact types instead of modules, for example: "from .foo
-                        import Bar" instead of "from . import foo" with "foo.Bar"
-  --use-pendulum        use pendulum instead of datetime
-  --use-schema-description
-                        Use schema description to populate class docstring
-  --use-title-as-name   use titles as class names of models
-
-Template customization:
-  --aliases ALIASES     Alias mapping file
-  --custom-file-header CUSTOM_FILE_HEADER
-                        Custom file header
-  --custom-file-header-path CUSTOM_FILE_HEADER_PATH
-                        Custom file header file path
-  --custom-formatters-kwargs CUSTOM_FORMATTERS_KWARGS
-                        A file with kwargs for custom formatters.
-  --custom-template-dir CUSTOM_TEMPLATE_DIR
-                        Custom template directory
-  --encoding ENCODING   The encoding of input and output (default: utf-8)
-  --extra-template-data EXTRA_TEMPLATE_DATA
-                        Extra template data
-  --use-double-quotes   Model generated with double quotes. Single quotes or your black
-                        config skip_string_normalization value will be used without this
-                        option.
-  --wrap-string-literal
-                        Wrap string literal by using black `experimental-string-processing`
-                        option (require black 20.8b0 or later)
-
-OpenAPI-only options:
-  --openapi-scopes {schemas,paths,tags,parameters} [{schemas,paths,tags,parameters} ...]
-                        Scopes of OpenAPI model generation (default: schemas)
-  --strict-nullable     Treat default field as a non-nullable field (Only OpenAPI)
-  --use-operation-id-as-name
-                        use operation id of OpenAPI as class names of models
-  --validation          Deprecated: Enable validation (Only OpenAPI). This option is
-                        deprecated and will be removed in future releases
-
-General options:
-  --debug               show debug message (require "debug". `$ pip install 'datamodel-code-
-                        generator[debug]'`)
-  --disable-warnings    disable warnings
-  --no-color            disable colorized output
-  --version             show version
-  -h, --help            show this help message and exit
-```
-<!-- end command help -->
-
-## Related projects
-### fastapi-code-generator
-This code generator creates [FastAPI](https://github.com/tiangolo/fastapi) app from an openapi file.
-
-[https://github.com/koxudaxi/fastapi-code-generator](https://github.com/koxudaxi/fastapi-code-generator)
-
-### pydantic-pycharm-plugin
-[A JetBrains PyCharm plugin](https://plugins.jetbrains.com/plugin/12861-pydantic) for [`pydantic`](https://github.com/samuelcolvin/pydantic).
-
-[https://github.com/koxudaxi/pydantic-pycharm-plugin](https://github.com/koxudaxi/pydantic-pycharm-plugin)
-
-## PyPi
-
-[https://pypi.org/project/datamodel-code-generator](https://pypi.org/project/datamodel-code-generator)
+---
 
-## Contributing
+## 🤝 Contributing
 
-See `docs/development-contributing.md` for how to get started!
+See [Development & Contributing](https://koxudaxi.github.io/datamodel-code-generator/development-contributing/) for how to get started!
 
-## License
+## 📄 License
 
-datamodel-code-generator is released under the MIT License. http://www.opensource.org/licenses/mit-license
+MIT License - see [LICENSE](LICENSE) for details.
diff -pruN 0.26.4-3/datamodel_code_generator/__init__.py 0.45.0-1/datamodel_code_generator/__init__.py
--- 0.26.4-3/datamodel_code_generator/__init__.py	2024-12-15 17:25:57.703037000 +0000
+++ 0.45.0-1/datamodel_code_generator/__init__.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,587 +0,0 @@
-from __future__ import annotations
-
-import contextlib
-import os
-import sys
-from datetime import datetime, timezone
-from enum import Enum
-from pathlib import Path
-from typing import (
-    IO,
-    TYPE_CHECKING,
-    Any,
-    Callable,
-    DefaultDict,
-    Dict,
-    Iterator,
-    List,
-    Mapping,
-    Optional,
-    Sequence,
-    Set,
-    TextIO,
-    Tuple,
-    Type,
-    TypeVar,
-    Union,
-)
-from urllib.parse import ParseResult
-
-import yaml
-
-import datamodel_code_generator.pydantic_patch  # noqa: F401
-from datamodel_code_generator.format import DatetimeClassType, PythonVersion
-from datamodel_code_generator.model.pydantic_v2 import UnionMode
-from datamodel_code_generator.parser import DefaultPutDict, LiteralType
-from datamodel_code_generator.parser.base import Parser
-from datamodel_code_generator.types import StrictTypes
-from datamodel_code_generator.util import SafeLoader  # type: ignore
-
-T = TypeVar('T')
-
-try:
-    import pysnooper
-
-    pysnooper.tracer.DISABLED = True
-except ImportError:  # pragma: no cover
-    pysnooper = None
-
-DEFAULT_BASE_CLASS: str = 'pydantic.BaseModel'
-
-
-def load_yaml(stream: Union[str, TextIO]) -> Any:
-    return yaml.load(stream, Loader=SafeLoader)
-
-
-def load_yaml_from_path(path: Path, encoding: str) -> Any:
-    with path.open(encoding=encoding) as f:
-        return load_yaml(f)
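These two helpers are the module's only YAML entry points, and they deliberately route through a safe loader so tagged YAML cannot instantiate arbitrary Python objects. A minimal sketch of that contract, assuming only that the library's `SafeLoader` (from `datamodel_code_generator.util`) behaves like PyYAML's standard safe loader:

```python
# Approximation of load_yaml with stock PyYAML; the module's own SafeLoader
# is assumed to be equivalent to yaml.SafeLoader for this purpose.
import yaml

doc = "openapi: 3.0.0\ninfo:\n  title: Pet API\n"
data = yaml.safe_load(doc)  # refuses python/object tags
assert data == {"openapi": "3.0.0", "info": {"title": "Pet API"}}
```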
-
-
-if TYPE_CHECKING:
-
-    def get_version() -> str: ...
-
-else:
-
-    def get_version() -> str:
-        package = 'datamodel-code-generator'
-
-        from importlib.metadata import version
-
-        return version(package)
-
-
-def enable_debug_message() -> None:  # pragma: no cover
-    if not pysnooper:
-        raise Exception(
-            "Please run `$pip install 'datamodel-code-generator[debug]'` to use debug option"
-        )
-
-    pysnooper.tracer.DISABLED = False
-
-
-def snooper_to_methods(  # type: ignore
-    output=None,
-    watch=(),
-    watch_explode=(),
-    depth=1,
-    prefix='',
-    overwrite=False,
-    thread_info=False,
-    custom_repr=(),
-    max_variable_length=100,
-) -> Callable[..., Any]:
-    def inner(cls: Type[T]) -> Type[T]:
-        if not pysnooper:
-            return cls
-        import inspect
-
-        methods = inspect.getmembers(cls, predicate=inspect.isfunction)
-        for name, method in methods:
-            snooper_method = pysnooper.snoop(
-                output,
-                watch,
-                watch_explode,
-                depth,
-                prefix,
-                overwrite,
-                thread_info,
-                custom_repr,
-                max_variable_length,
-            )(method)
-            setattr(cls, name, snooper_method)
-        return cls
-
-    return inner
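A sketch of how this decorator is used: applying it to a class wraps every plain function attribute with `pysnooper.snoop`, and the wrapping stays silent until `enable_debug_message()` flips the tracer on. The `Circle` class here is invented for illustration:

```python
from datamodel_code_generator import enable_debug_message, snooper_to_methods

@snooper_to_methods(max_variable_length=100)
class Circle:  # hypothetical example class
    def area(self, radius: float) -> float:
        return 3.14159 * radius * radius

enable_debug_message()  # needs `pip install 'datamodel-code-generator[debug]'`
Circle().area(2.0)      # pysnooper now traces the call line by line
```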
-
-
-@contextlib.contextmanager
-def chdir(path: Optional[Path]) -> Iterator[None]:
-    """Changes working directory and returns to previous on exit."""
-
-    if path is None:
-        yield
-    else:
-        prev_cwd = Path.cwd()
-        try:
-            os.chdir(path if path.is_dir() else path.parent)
-            yield
-        finally:
-            os.chdir(prev_cwd)
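This helper is used later in `generate` to run the parser relative to the output location. A minimal usage sketch (paths are illustrative and assume a POSIX `/tmp`):

```python
from pathlib import Path

from datamodel_code_generator import chdir

start = Path.cwd()
with chdir(Path("/tmp")):                    # a file path would use its parent
    assert Path.cwd() == Path("/tmp").resolve()
assert Path.cwd() == start                   # previous directory restored

with chdir(None):                            # None is deliberately a no-op
    assert Path.cwd() == start
```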
-
-
-def is_openapi(text: str) -> bool:
-    return 'openapi' in load_yaml(text)
-
-
-JSON_SCHEMA_URLS: Tuple[str, ...] = (
-    'http://json-schema.org/',
-    'https://json-schema.org/',
-)
-
-
-def is_schema(text: str) -> bool:
-    data = load_yaml(text)
-    if not isinstance(data, dict):
-        return False
-    schema = data.get('$schema')
-    if isinstance(schema, str) and any(
-        schema.startswith(u) for u in JSON_SCHEMA_URLS
-    ):  # pragma: no cover
-        return True
-    if isinstance(data.get('type'), str):
-        return True
-    if any(
-        isinstance(data.get(o), list)
-        for o in (
-            'allOf',
-            'anyOf',
-            'oneOf',
-        )
-    ):
-        return True
-    if isinstance(data.get('properties'), dict):
-        return True
-    return False
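The heuristics above accept several document shapes; these checks follow directly from the branches of `is_schema` (JSON strings work because the YAML loader parses JSON):

```python
from datamodel_code_generator import is_schema

assert is_schema('{"$schema": "https://json-schema.org/draft/2020-12/schema"}')
assert is_schema('{"type": "object"}')                           # top-level `type`
assert is_schema('{"anyOf": [{"type": "string"}]}')              # combinator list
assert is_schema('{"properties": {"id": {"type": "integer"}}}')  # bare properties
assert not is_schema('{"id": 1, "name": "foo"}')                 # plain data
```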
-
-
-class InputFileType(Enum):
-    Auto = 'auto'
-    OpenAPI = 'openapi'
-    JsonSchema = 'jsonschema'
-    Json = 'json'
-    Yaml = 'yaml'
-    Dict = 'dict'
-    CSV = 'csv'
-    GraphQL = 'graphql'
-
-
-RAW_DATA_TYPES: List[InputFileType] = [
-    InputFileType.Json,
-    InputFileType.Yaml,
-    InputFileType.Dict,
-    InputFileType.CSV,
-    InputFileType.GraphQL,
-]
-
-
-class DataModelType(Enum):
-    PydanticBaseModel = 'pydantic.BaseModel'
-    PydanticV2BaseModel = 'pydantic_v2.BaseModel'
-    DataclassesDataclass = 'dataclasses.dataclass'
-    TypingTypedDict = 'typing.TypedDict'
-    MsgspecStruct = 'msgspec.Struct'
-
-
-class OpenAPIScope(Enum):
-    Schemas = 'schemas'
-    Paths = 'paths'
-    Tags = 'tags'
-    Parameters = 'parameters'
-
-
-class GraphQLScope(Enum):
-    Schema = 'schema'
-
-
-class Error(Exception):
-    def __init__(self, message: str) -> None:
-        self.message: str = message
-
-    def __str__(self) -> str:
-        return self.message
-
-
-class InvalidClassNameError(Error):
-    def __init__(self, class_name: str) -> None:
-        self.class_name = class_name
-        message = f'title={repr(class_name)} is an invalid class name.'
-        super().__init__(message=message)
-
-
-def get_first_file(path: Path) -> Path:  # pragma: no cover
-    if path.is_file():
-        return path
-    elif path.is_dir():
-        for child in path.rglob('*'):
-            if child.is_file():
-                return child
-    raise Error('File not found')
-
-
-def generate(
-    input_: Union[Path, str, ParseResult, Mapping[str, Any]],
-    *,
-    input_filename: Optional[str] = None,
-    input_file_type: InputFileType = InputFileType.Auto,
-    output: Optional[Path] = None,
-    output_model_type: DataModelType = DataModelType.PydanticBaseModel,
-    target_python_version: PythonVersion = PythonVersion.PY_38,
-    base_class: str = '',
-    additional_imports: Optional[List[str]] = None,
-    custom_template_dir: Optional[Path] = None,
-    extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-    validation: bool = False,
-    field_constraints: bool = False,
-    snake_case_field: bool = False,
-    strip_default_none: bool = False,
-    aliases: Optional[Mapping[str, str]] = None,
-    disable_timestamp: bool = False,
-    enable_version_header: bool = False,
-    allow_population_by_field_name: bool = False,
-    allow_extra_fields: bool = False,
-    apply_default_values_for_required_fields: bool = False,
-    force_optional_for_required_fields: bool = False,
-    class_name: Optional[str] = None,
-    use_standard_collections: bool = False,
-    use_schema_description: bool = False,
-    use_field_description: bool = False,
-    use_default_kwarg: bool = False,
-    reuse_model: bool = False,
-    encoding: str = 'utf-8',
-    enum_field_as_literal: Optional[LiteralType] = None,
-    use_one_literal_as_default: bool = False,
-    set_default_enum_member: bool = False,
-    use_subclass_enum: bool = False,
-    strict_nullable: bool = False,
-    use_generic_container_types: bool = False,
-    enable_faux_immutability: bool = False,
-    disable_appending_item_suffix: bool = False,
-    strict_types: Optional[Sequence[StrictTypes]] = None,
-    empty_enum_field_name: Optional[str] = None,
-    custom_class_name_generator: Optional[Callable[[str], str]] = None,
-    field_extra_keys: Optional[Set[str]] = None,
-    field_include_all_keys: bool = False,
-    field_extra_keys_without_x_prefix: Optional[Set[str]] = None,
-    openapi_scopes: Optional[List[OpenAPIScope]] = None,
-    graphql_scopes: Optional[List[GraphQLScope]] = None,
-    wrap_string_literal: Optional[bool] = None,
-    use_title_as_name: bool = False,
-    use_operation_id_as_name: bool = False,
-    use_unique_items_as_set: bool = False,
-    http_headers: Optional[Sequence[Tuple[str, str]]] = None,
-    http_ignore_tls: bool = False,
-    use_annotated: bool = False,
-    use_non_positive_negative_number_constrained_types: bool = False,
-    original_field_name_delimiter: Optional[str] = None,
-    use_double_quotes: bool = False,
-    use_union_operator: bool = False,
-    collapse_root_models: bool = False,
-    special_field_name_prefix: Optional[str] = None,
-    remove_special_field_name_prefix: bool = False,
-    capitalise_enum_members: bool = False,
-    keep_model_order: bool = False,
-    custom_file_header: Optional[str] = None,
-    custom_file_header_path: Optional[Path] = None,
-    custom_formatters: Optional[List[str]] = None,
-    custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
-    use_pendulum: bool = False,
-    http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
-    treat_dots_as_module: bool = False,
-    use_exact_imports: bool = False,
-    union_mode: Optional[UnionMode] = None,
-    output_datetime_class: Optional[DatetimeClassType] = None,
-    keyword_only: bool = False,
-    no_alias: bool = False,
-) -> None:
-    remote_text_cache: DefaultPutDict[str, str] = DefaultPutDict()
-    if isinstance(input_, str):
-        input_text: Optional[str] = input_
-    elif isinstance(input_, ParseResult):
-        from datamodel_code_generator.http import get_body
-
-        input_text = remote_text_cache.get_or_put(
-            input_.geturl(),
-            default_factory=lambda url: get_body(
-                url, http_headers, http_ignore_tls, http_query_parameters
-            ),
-        )
-    else:
-        input_text = None
-
-    if isinstance(input_, Path) and not input_.is_absolute():
-        input_ = input_.expanduser().resolve()
-    if input_file_type == InputFileType.Auto:
-        try:
-            input_text_ = (
-                get_first_file(input_).read_text(encoding=encoding)
-                if isinstance(input_, Path)
-                else input_text
-            )
-            assert isinstance(input_text_, str)
-            input_file_type = infer_input_type(input_text_)
-            print(
-                inferred_message.format(input_file_type.value),
-                file=sys.stderr,
-            )
-        except:  # noqa
-            raise Error('Invalid file format')
-
-    kwargs: Dict[str, Any] = {}
-    if input_file_type == InputFileType.OpenAPI:
-        from datamodel_code_generator.parser.openapi import OpenAPIParser
-
-        parser_class: Type[Parser] = OpenAPIParser
-        kwargs['openapi_scopes'] = openapi_scopes
-    elif input_file_type == InputFileType.GraphQL:
-        from datamodel_code_generator.parser.graphql import GraphQLParser
-
-        parser_class: Type[Parser] = GraphQLParser
-    else:
-        from datamodel_code_generator.parser.jsonschema import JsonSchemaParser
-
-        parser_class = JsonSchemaParser
-
-        if input_file_type in RAW_DATA_TYPES:
-            import json
-
-            try:
-                if isinstance(input_, Path) and input_.is_dir():  # pragma: no cover
-                    raise Error(f'Input must be a file for {input_file_type}')
-                obj: Dict[Any, Any]
-                if input_file_type == InputFileType.CSV:
-                    import csv
-
-                    def get_header_and_first_line(csv_file: IO[str]) -> Dict[str, Any]:
-                        csv_reader = csv.DictReader(csv_file)
-                        # pair each header with the first row's value (iterating the
-                        # row dict directly would yield its keys, not its values)
-                        return dict(zip(csv_reader.fieldnames, next(csv_reader).values()))  # type: ignore
-
-                    if isinstance(input_, Path):
-                        with input_.open(encoding=encoding) as f:
-                            obj = get_header_and_first_line(f)
-                    else:
-                        import io
-
-                        obj = get_header_and_first_line(io.StringIO(input_text))
-                elif input_file_type == InputFileType.Yaml:
-                    obj = load_yaml(
-                        input_.read_text(encoding=encoding)  # type: ignore
-                        if isinstance(input_, Path)
-                        else input_text
-                    )
-                elif input_file_type == InputFileType.Json:
-                    obj = json.loads(
-                        input_.read_text(encoding=encoding)  # type: ignore
-                        if isinstance(input_, Path)
-                        else input_text
-                    )
-                elif input_file_type == InputFileType.Dict:
-                    import ast
-
-                    # Input can be a dict object stored in a python file
-                    obj = (
-                        ast.literal_eval(
-                            input_.read_text(encoding=encoding)  # type: ignore
-                        )
-                        if isinstance(input_, Path)
-                        else input_
-                    )
-                else:  # pragma: no cover
-                    raise Error(f'Unsupported input file type: {input_file_type}')
-            except:  # noqa
-                raise Error('Invalid file format')
-
-            from genson import SchemaBuilder
-
-            builder = SchemaBuilder()
-            builder.add_object(obj)
-            input_text = json.dumps(builder.to_schema())
-
-    if isinstance(input_, ParseResult) and input_file_type not in RAW_DATA_TYPES:
-        input_text = None
-
-    if union_mode is not None:
-        if output_model_type == DataModelType.PydanticV2BaseModel:
-            default_field_extras = {'union_mode': union_mode}
-        else:  # pragma: no cover
-            raise Error('union_mode is only supported for pydantic_v2.BaseModel')
-    else:
-        default_field_extras = None
-
-    from datamodel_code_generator.model import get_data_model_types
-
-    data_model_types = get_data_model_types(
-        output_model_type, target_python_version, output_datetime_class
-    )
-    parser = parser_class(
-        source=input_text or input_,
-        data_model_type=data_model_types.data_model,
-        data_model_root_type=data_model_types.root_model,
-        data_model_field_type=data_model_types.field_model,
-        data_type_manager_type=data_model_types.data_type_manager,
-        base_class=base_class,
-        additional_imports=additional_imports,
-        custom_template_dir=custom_template_dir,
-        extra_template_data=extra_template_data,
-        target_python_version=target_python_version,
-        dump_resolve_reference_action=data_model_types.dump_resolve_reference_action,
-        validation=validation,
-        field_constraints=field_constraints,
-        snake_case_field=snake_case_field,
-        strip_default_none=strip_default_none,
-        aliases=aliases,
-        allow_population_by_field_name=allow_population_by_field_name,
-        allow_extra_fields=allow_extra_fields,
-        apply_default_values_for_required_fields=apply_default_values_for_required_fields,
-        force_optional_for_required_fields=force_optional_for_required_fields,
-        class_name=class_name,
-        use_standard_collections=use_standard_collections,
-        base_path=input_.parent
-        if isinstance(input_, Path) and input_.is_file()
-        else None,
-        use_schema_description=use_schema_description,
-        use_field_description=use_field_description,
-        use_default_kwarg=use_default_kwarg,
-        reuse_model=reuse_model,
-        enum_field_as_literal=LiteralType.All
-        if output_model_type == DataModelType.TypingTypedDict
-        else enum_field_as_literal,
-        use_one_literal_as_default=use_one_literal_as_default,
-        set_default_enum_member=True
-        if output_model_type == DataModelType.DataclassesDataclass
-        else set_default_enum_member,
-        use_subclass_enum=use_subclass_enum,
-        strict_nullable=strict_nullable,
-        use_generic_container_types=use_generic_container_types,
-        enable_faux_immutability=enable_faux_immutability,
-        remote_text_cache=remote_text_cache,
-        disable_appending_item_suffix=disable_appending_item_suffix,
-        strict_types=strict_types,
-        empty_enum_field_name=empty_enum_field_name,
-        custom_class_name_generator=custom_class_name_generator,
-        field_extra_keys=field_extra_keys,
-        field_include_all_keys=field_include_all_keys,
-        field_extra_keys_without_x_prefix=field_extra_keys_without_x_prefix,
-        wrap_string_literal=wrap_string_literal,
-        use_title_as_name=use_title_as_name,
-        use_operation_id_as_name=use_operation_id_as_name,
-        use_unique_items_as_set=use_unique_items_as_set,
-        http_headers=http_headers,
-        http_ignore_tls=http_ignore_tls,
-        use_annotated=use_annotated,
-        use_non_positive_negative_number_constrained_types=use_non_positive_negative_number_constrained_types,
-        original_field_name_delimiter=original_field_name_delimiter,
-        use_double_quotes=use_double_quotes,
-        use_union_operator=use_union_operator,
-        collapse_root_models=collapse_root_models,
-        special_field_name_prefix=special_field_name_prefix,
-        remove_special_field_name_prefix=remove_special_field_name_prefix,
-        capitalise_enum_members=capitalise_enum_members,
-        keep_model_order=keep_model_order,
-        known_third_party=data_model_types.known_third_party,
-        custom_formatters=custom_formatters,
-        custom_formatters_kwargs=custom_formatters_kwargs,
-        use_pendulum=use_pendulum,
-        http_query_parameters=http_query_parameters,
-        treat_dots_as_module=treat_dots_as_module,
-        use_exact_imports=use_exact_imports,
-        default_field_extras=default_field_extras,
-        target_datetime_class=output_datetime_class,
-        keyword_only=keyword_only,
-        no_alias=no_alias,
-        **kwargs,
-    )
-
-    with chdir(output):
-        results = parser.parse()
-    if not input_filename:  # pragma: no cover
-        if isinstance(input_, str):
-            input_filename = '<stdin>'
-        elif isinstance(input_, ParseResult):
-            input_filename = input_.geturl()
-        elif input_file_type == InputFileType.Dict:
-            # input_ might be a dict object provided directly, and missing a name field
-            input_filename = getattr(input_, 'name', '<dict>')
-        else:
-            input_filename = input_.name
-    if not results:
-        raise Error('Models not found in the input data')
-    elif isinstance(results, str):
-        modules = {output: (results, input_filename)}
-    else:
-        if output is None:
-            raise Error('Modular references require an output directory')
-        if output.suffix:
-            raise Error('Modular references require an output directory, not a file')
-        modules = {
-            output.joinpath(*name): (
-                result.body,
-                str(result.source.as_posix() if result.source else input_filename),
-            )
-            for name, result in sorted(results.items())
-        }
-
-    timestamp = datetime.now(timezone.utc).replace(microsecond=0).isoformat()
-
-    if custom_file_header is None and custom_file_header_path:
-        custom_file_header = custom_file_header_path.read_text(encoding=encoding)
-
-    header = """\
-# generated by datamodel-codegen:
-#   filename:  {}"""
-    if not disable_timestamp:
-        header += f'\n#   timestamp: {timestamp}'
-    if enable_version_header:
-        header += f'\n#   version:   {get_version()}'
-
-    file: Optional[IO[Any]]
-    for path, (body, filename) in modules.items():
-        if path is None:
-            file = None
-        else:
-            if not path.parent.exists():
-                path.parent.mkdir(parents=True)
-            file = path.open('wt', encoding=encoding)
-
-        print(custom_file_header or header.format(filename), file=file)
-        if body:
-            print('', file=file)
-            print(body.rstrip(), file=file)
-
-        if file is not None:
-            file.close()
-
-
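In the raw-data branch above, every JSON/YAML/CSV/dict input is first condensed into a JSON Schema with genson, and that schema is what `JsonSchemaParser` actually parses. A standalone sketch using only the genson calls shown in `generate`:

```python
import json

from genson import SchemaBuilder

builder = SchemaBuilder()
builder.add_object({"id": 1, "name": "foo", "tags": ["a", "b"]})
print(json.dumps(builder.to_schema(), indent=2))
# -> an object schema with integer/string/array-of-string properties
```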
-def infer_input_type(text: str) -> InputFileType:
-    if is_openapi(text):
-        return InputFileType.OpenAPI
-    elif is_schema(text):
-        return InputFileType.JsonSchema
-    return InputFileType.Json
-
-
-inferred_message = (
-    'The input file type was determined to be: {}\nThis can be specified explicitly with the '
-    '`--input-file-type` option.'
-)
-
-__all__ = [
-    'DefaultPutDict',
-    'Error',
-    'InputFileType',
-    'InvalidClassNameError',
-    'LiteralType',
-    'PythonVersion',
-    'generate',
-]
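For reference, a minimal programmatic call against the 0.26.x API removed above; the keyword names come straight from the `generate` signature in this hunk, and the output path is illustrative:

```python
from pathlib import Path

from datamodel_code_generator import DataModelType, InputFileType, generate

schema = '{"title": "Pet", "type": "object", "properties": {"name": {"type": "string"}}}'
generate(
    schema,                                    # inline input is passed as a str
    input_file_type=InputFileType.JsonSchema,  # skip auto-detection
    output=Path("pet.py"),                     # single-module result is written here
    output_model_type=DataModelType.PydanticV2BaseModel,
)
print(Path("pet.py").read_text())
```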
diff -pruN 0.26.4-3/datamodel_code_generator/__main__.py 0.45.0-1/datamodel_code_generator/__main__.py
--- 0.26.4-3/datamodel_code_generator/__main__.py	2024-12-15 17:25:57.703037000 +0000
+++ 0.45.0-1/datamodel_code_generator/__main__.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,568 +0,0 @@
-#! /usr/bin/env python
-
-"""
-Main function.
-"""
-
-from __future__ import annotations
-
-import json
-import signal
-import sys
-import warnings
-from collections import defaultdict
-from enum import IntEnum
-from io import TextIOBase
-from pathlib import Path
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    DefaultDict,
-    Dict,
-    List,
-    Optional,
-    Sequence,
-    Set,
-    Tuple,
-    Union,
-    cast,
-)
-from urllib.parse import ParseResult, urlparse
-
-import argcomplete
-import black
-from pydantic import BaseModel
-
-from datamodel_code_generator.model.pydantic_v2 import UnionMode
-
-if TYPE_CHECKING:
-    from argparse import Namespace
-
-    from typing_extensions import Self
-
-from datamodel_code_generator import (
-    DataModelType,
-    Error,
-    InputFileType,
-    InvalidClassNameError,
-    OpenAPIScope,
-    enable_debug_message,
-    generate,
-)
-from datamodel_code_generator.arguments import DEFAULT_ENCODING, arg_parser, namespace
-from datamodel_code_generator.format import (
-    DatetimeClassType,
-    PythonVersion,
-    black_find_project_root,
-    is_supported_in_black,
-)
-from datamodel_code_generator.parser import LiteralType
-from datamodel_code_generator.reference import is_url
-from datamodel_code_generator.types import StrictTypes
-from datamodel_code_generator.util import (
-    PYDANTIC_V2,
-    ConfigDict,
-    Model,
-    field_validator,
-    load_toml,
-    model_validator,
-)
-
-
-class Exit(IntEnum):
-    """Exit reasons."""
-
-    OK = 0
-    ERROR = 1
-    KeyboardInterrupt = 2
-
-
-def sig_int_handler(_: int, __: Any) -> None:  # pragma: no cover
-    exit(Exit.OK)
-
-
-signal.signal(signal.SIGINT, sig_int_handler)
-
-
-class Config(BaseModel):
-    if PYDANTIC_V2:
-        model_config = ConfigDict(arbitrary_types_allowed=True)
-
-        def get(self, item: str) -> Any:
-            return getattr(self, item)
-
-        def __getitem__(self, item: str) -> Any:
-            return self.get(item)
-
-        if TYPE_CHECKING:
-
-            @classmethod
-            def get_fields(cls) -> Dict[str, Any]: ...
-
-        else:
-
-            @classmethod
-            def parse_obj(cls: type[Model], obj: Any) -> Model:
-                return cls.model_validate(obj)
-
-            @classmethod
-            def get_fields(cls) -> Dict[str, Any]:
-                return cls.model_fields
-
-    else:
-
-        class Config:
-            # validate_assignment = True
-            # Pydantic 1.5.1 doesn't support validate_assignment correctly
-            arbitrary_types_allowed = (TextIOBase,)
-
-        if not TYPE_CHECKING:
-
-            @classmethod
-            def get_fields(cls) -> Dict[str, Any]:
-                return cls.__fields__
-
-    @field_validator(
-        'aliases', 'extra_template_data', 'custom_formatters_kwargs', mode='before'
-    )
-    def validate_file(cls, value: Any) -> Optional[TextIOBase]:
-        if value is None or isinstance(value, TextIOBase):
-            return value
-        return cast(TextIOBase, Path(value).expanduser().resolve().open('rt'))
-
-    @field_validator(
-        'input',
-        'output',
-        'custom_template_dir',
-        'custom_file_header_path',
-        mode='before',
-    )
-    def validate_path(cls, value: Any) -> Optional[Path]:
-        if value is None or isinstance(value, Path):
-            return value  # pragma: no cover
-        return Path(value).expanduser().resolve()
-
-    @field_validator('url', mode='before')
-    def validate_url(cls, value: Any) -> Optional[ParseResult]:
-        if isinstance(value, str) and is_url(value):  # pragma: no cover
-            return urlparse(value)
-        elif value is None:  # pragma: no cover
-            return None
-        raise Error(
-            f'Unsupported protocol: only http/https are allowed. --input={value}'
-        )  # pragma: no cover
-
-    @model_validator(mode='after')
-    def validate_use_generic_container_types(
-        cls, values: Dict[str, Any]
-    ) -> Dict[str, Any]:
-        if values.get('use_generic_container_types'):
-            target_python_version: PythonVersion = values['target_python_version']
-            if target_python_version == PythonVersion.PY_36:
-                raise Error(
-                    f'`--use-generic-container-types` cannot be used with `--target-python-version` {PythonVersion.PY_36.value}.\n'
-                    ' This Python version will not be supported in a future release'
-                )
-        return values
-
-    @model_validator(mode='after')
-    def validate_original_field_name_delimiter(
-        cls, values: Dict[str, Any]
-    ) -> Dict[str, Any]:
-        if values.get('original_field_name_delimiter') is not None:
-            if not values.get('snake_case_field'):
-                raise Error(
-                    '`--original-field-name-delimiter` cannot be used without `--snake-case-field`.'
-                )
-        return values
-
-    @model_validator(mode='after')
-    def validate_custom_file_header(cls, values: Dict[str, Any]) -> Dict[str, Any]:
-        if values.get('custom_file_header') and values.get('custom_file_header_path'):
-            raise Error(
-                '`--custom-file-header-path` cannot be used with `--custom-file-header`.'
-            )  # pragma: no cover
-        return values
-
-    @model_validator(mode='after')
-    def validate_keyword_only(cls, values: Dict[str, Any]) -> Dict[str, Any]:
-        output_model_type: DataModelType = values.get('output_model_type')
-        python_target: PythonVersion = values.get('target_python_version')
-        if (
-            values.get('keyword_only')
-            and output_model_type == DataModelType.DataclassesDataclass
-            and not python_target.has_kw_only_dataclass
-        ):
-            raise Error(
-                f'`--keyword-only` requires `--target-python-version` {PythonVersion.PY_310.value} or higher.'
-            )
-        return values
-
-    @model_validator(mode='after')
-    def validate_output_datetime_class(cls, values: Dict[str, Any]) -> Dict[str, Any]:
-        datetime_class_type: Optional[DatetimeClassType] = values.get(
-            'output_datetime_class'
-        )
-        if (
-            datetime_class_type
-            and datetime_class_type is not DatetimeClassType.Datetime
-            and values.get('output_model_type') == DataModelType.DataclassesDataclass
-        ):
-            raise Error(
-                '`--output-datetime-class` only allows "datetime" for '
-                f'`--output-model-type` {DataModelType.DataclassesDataclass.value}'
-            )
-        return values
-
-    # Pydantic 1.5.1 doesn't support each_item=True correctly
-    @field_validator('http_headers', mode='before')
-    def validate_http_headers(cls, value: Any) -> Optional[List[Tuple[str, str]]]:
-        def validate_each_item(each_item: Any) -> Tuple[str, str]:
-            if isinstance(each_item, str):  # pragma: no cover
-                try:
-                    field_name, field_value = each_item.split(':', maxsplit=1)  # type: str, str
-                    return field_name, field_value.lstrip()
-                except ValueError:
-                    raise Error(f'Invalid http header: {each_item!r}')
-            return each_item  # pragma: no cover
-
-        if isinstance(value, list):
-            return [validate_each_item(each_item) for each_item in value]
-        return value  # pragma: no cover
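The `maxsplit=1` above is what lets header values contain further colons (base64 credentials, for instance). A standalone restatement of the rule:

```python
from typing import Tuple

def split_header(item: str) -> Tuple[str, str]:
    # mirrors validate_http_headers: split once, strip only the value's lead
    name, value = item.split(":", maxsplit=1)
    return name, value.lstrip()

assert split_header("Authorization: Basic dXNlcjpwYXNz") == ("Authorization", "Basic dXNlcjpwYXNz")
assert split_header("X-Trace: a:b:c") == ("X-Trace", "a:b:c")  # later colons kept
```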
-
-    @field_validator('http_query_parameters', mode='before')
-    def validate_http_query_parameters(
-        cls, value: Any
-    ) -> Optional[List[Tuple[str, str]]]:
-        def validate_each_item(each_item: Any) -> Tuple[str, str]:
-            if isinstance(each_item, str):  # pragma: no cover
-                try:
-                    field_name, field_value = each_item.split('=', maxsplit=1)  # type: str, str
-                    return field_name, field_value.lstrip()
-                except ValueError:
-                    raise Error(f'Invalid http query parameter: {each_item!r}')
-            return each_item  # pragma: no cover
-
-        if isinstance(value, list):
-            return [validate_each_item(each_item) for each_item in value]
-        return value  # pragma: no cover
-
-    @model_validator(mode='before')
-    def validate_additional_imports(cls, values: Dict[str, Any]) -> Dict[str, Any]:
-        if values.get('additional_imports') is not None:
-            values['additional_imports'] = values.get('additional_imports').split(',')
-        return values
-
-    @model_validator(mode='before')
-    def validate_custom_formatters(cls, values: Dict[str, Any]) -> Dict[str, Any]:
-        if values.get('custom_formatters') is not None:
-            values['custom_formatters'] = values.get('custom_formatters').split(',')
-        return values
-
-    if PYDANTIC_V2:
-
-        @model_validator(mode='after')  # type: ignore
-        def validate_root(self: Self) -> Self:
-            if self.use_annotated:
-                self.field_constraints = True
-            return self
-
-    else:
-
-        @model_validator(mode='after')
-        def validate_root(cls, values: Any) -> Any:
-            if values.get('use_annotated'):
-                values['field_constraints'] = True
-            return values
-
-    input: Optional[Union[Path, str]] = None
-    input_file_type: InputFileType = InputFileType.Auto
-    output_model_type: DataModelType = DataModelType.PydanticBaseModel
-    output: Optional[Path] = None
-    debug: bool = False
-    disable_warnings: bool = False
-    target_python_version: PythonVersion = PythonVersion.PY_38
-    base_class: str = ''
-    additional_imports: Optional[List[str]] = None
-    custom_template_dir: Optional[Path] = None
-    extra_template_data: Optional[TextIOBase] = None
-    validation: bool = False
-    field_constraints: bool = False
-    snake_case_field: bool = False
-    strip_default_none: bool = False
-    aliases: Optional[TextIOBase] = None
-    disable_timestamp: bool = False
-    enable_version_header: bool = False
-    allow_population_by_field_name: bool = False
-    allow_extra_fields: bool = False
-    use_default: bool = False
-    force_optional: bool = False
-    class_name: Optional[str] = None
-    use_standard_collections: bool = False
-    use_schema_description: bool = False
-    use_field_description: bool = False
-    use_default_kwarg: bool = False
-    reuse_model: bool = False
-    encoding: str = DEFAULT_ENCODING
-    enum_field_as_literal: Optional[LiteralType] = None
-    use_one_literal_as_default: bool = False
-    set_default_enum_member: bool = False
-    use_subclass_enum: bool = False
-    strict_nullable: bool = False
-    use_generic_container_types: bool = False
-    use_union_operator: bool = False
-    enable_faux_immutability: bool = False
-    url: Optional[ParseResult] = None
-    disable_appending_item_suffix: bool = False
-    strict_types: List[StrictTypes] = []
-    empty_enum_field_name: Optional[str] = None
-    field_extra_keys: Optional[Set[str]] = None
-    field_include_all_keys: bool = False
-    field_extra_keys_without_x_prefix: Optional[Set[str]] = None
-    openapi_scopes: Optional[List[OpenAPIScope]] = [OpenAPIScope.Schemas]
-    wrap_string_literal: Optional[bool] = None
-    use_title_as_name: bool = False
-    use_operation_id_as_name: bool = False
-    use_unique_items_as_set: bool = False
-    http_headers: Optional[Sequence[Tuple[str, str]]] = None
-    http_ignore_tls: bool = False
-    use_annotated: bool = False
-    use_non_positive_negative_number_constrained_types: bool = False
-    original_field_name_delimiter: Optional[str] = None
-    use_double_quotes: bool = False
-    collapse_root_models: bool = False
-    special_field_name_prefix: Optional[str] = None
-    remove_special_field_name_prefix: bool = False
-    capitalise_enum_members: bool = False
-    keep_model_order: bool = False
-    custom_file_header: Optional[str] = None
-    custom_file_header_path: Optional[Path] = None
-    custom_formatters: Optional[List[str]] = None
-    custom_formatters_kwargs: Optional[TextIOBase] = None
-    use_pendulum: bool = False
-    http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None
-    treat_dot_as_module: bool = False
-    use_exact_imports: bool = False
-    union_mode: Optional[UnionMode] = None
-    output_datetime_class: Optional[DatetimeClassType] = None
-    keyword_only: bool = False
-    no_alias: bool = False
-
-    def merge_args(self, args: Namespace) -> None:
-        set_args = {
-            f: getattr(args, f)
-            for f in self.get_fields()
-            if getattr(args, f) is not None
-        }
-
-        if set_args.get('output_model_type') == DataModelType.MsgspecStruct.value:
-            set_args['use_annotated'] = True
-
-        if set_args.get('use_annotated'):
-            set_args['field_constraints'] = True
-
-        parsed_args = Config.parse_obj(set_args)
-        for field_name in set_args:
-            setattr(self, field_name, getattr(parsed_args, field_name))
-
-
-def main(args: Optional[Sequence[str]] = None) -> Exit:
-    """Main function."""
-
-    # add cli completion support
-    argcomplete.autocomplete(arg_parser)
-
-    if args is None:  # pragma: no cover
-        args = sys.argv[1:]
-
-    arg_parser.parse_args(args, namespace=namespace)
-
-    if namespace.version:
-        from datamodel_code_generator.version import version
-
-        print(version)
-        exit(0)
-
-    root = black_find_project_root((Path().resolve(),))
-    pyproject_toml_path = root / 'pyproject.toml'
-    if pyproject_toml_path.is_file():
-        pyproject_toml: Dict[str, Any] = {
-            k.replace('-', '_'): v
-            for k, v in load_toml(pyproject_toml_path)
-            .get('tool', {})
-            .get('datamodel-codegen', {})
-            .items()
-        }
-    else:
-        pyproject_toml = {}
-
-    try:
-        config = Config.parse_obj(pyproject_toml)
-        config.merge_args(namespace)
-    except Error as e:
-        print(e.message, file=sys.stderr)
-        return Exit.ERROR
-
-    if not config.input and not config.url and sys.stdin.isatty():
-        print(
-            'No input found: provide `stdin` or the `--input`/`--url` arguments',
-            file=sys.stderr,
-        )
-        arg_parser.print_help()
-        return Exit.ERROR
-
-    if not is_supported_in_black(config.target_python_version):  # pragma: no cover
-        print(
-            f"Installed black doesn't support Python version {config.target_python_version.value}.\n"  # type: ignore
-            f'Please install a newer version of black.\n'
-            f'Installed black version: {black.__version__}',
-            file=sys.stderr,
-        )
-        return Exit.ERROR
-
-    if config.debug:  # pragma: no cover
-        enable_debug_message()
-
-    if config.disable_warnings:
-        warnings.simplefilter('ignore')
-    extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]]
-    if config.extra_template_data is None:
-        extra_template_data = None
-    else:
-        with config.extra_template_data as data:
-            try:
-                extra_template_data = json.load(
-                    data, object_hook=lambda d: defaultdict(dict, **d)
-                )
-            except json.JSONDecodeError as e:
-                print(f'Unable to load extra template data: {e}', file=sys.stderr)
-                return Exit.ERROR
-
-    if config.aliases is None:
-        aliases = None
-    else:
-        with config.aliases as data:
-            try:
-                aliases = json.load(data)
-            except json.JSONDecodeError as e:
-                print(f'Unable to load alias mapping: {e}', file=sys.stderr)
-                return Exit.ERROR
-        if not isinstance(aliases, dict) or not all(
-            isinstance(k, str) and isinstance(v, str) for k, v in aliases.items()
-        ):
-            print(
-                'Alias mapping must be a JSON string mapping (e.g. {"from": "to", ...})',
-                file=sys.stderr,
-            )
-            return Exit.ERROR
-
-    if config.custom_formatters_kwargs is None:
-        custom_formatters_kwargs = None
-    else:
-        with config.custom_formatters_kwargs as data:
-            try:
-                custom_formatters_kwargs = json.load(data)
-            except json.JSONDecodeError as e:  # pragma: no cover
-                print(
-                    f'Unable to load custom_formatters_kwargs mapping: {e}',
-                    file=sys.stderr,
-                )
-                return Exit.ERROR
-        if not isinstance(custom_formatters_kwargs, dict) or not all(
-            isinstance(k, str) and isinstance(v, str)
-            for k, v in custom_formatters_kwargs.items()
-        ):  # pragma: no cover
-            print(
-                'Custom formatters kwargs mapping must be a JSON string mapping (e.g. {"from": "to", ...})',
-                file=sys.stderr,
-            )
-            return Exit.ERROR
-
-    try:
-        generate(
-            input_=config.url or config.input or sys.stdin.read(),
-            input_file_type=config.input_file_type,
-            output=config.output,
-            output_model_type=config.output_model_type,
-            target_python_version=config.target_python_version,
-            base_class=config.base_class,
-            additional_imports=config.additional_imports,
-            custom_template_dir=config.custom_template_dir,
-            validation=config.validation,
-            field_constraints=config.field_constraints,
-            snake_case_field=config.snake_case_field,
-            strip_default_none=config.strip_default_none,
-            extra_template_data=extra_template_data,
-            aliases=aliases,
-            disable_timestamp=config.disable_timestamp,
-            enable_version_header=config.enable_version_header,
-            allow_population_by_field_name=config.allow_population_by_field_name,
-            allow_extra_fields=config.allow_extra_fields,
-            apply_default_values_for_required_fields=config.use_default,
-            force_optional_for_required_fields=config.force_optional,
-            class_name=config.class_name,
-            use_standard_collections=config.use_standard_collections,
-            use_schema_description=config.use_schema_description,
-            use_field_description=config.use_field_description,
-            use_default_kwarg=config.use_default_kwarg,
-            reuse_model=config.reuse_model,
-            encoding=config.encoding,
-            enum_field_as_literal=config.enum_field_as_literal,
-            use_one_literal_as_default=config.use_one_literal_as_default,
-            set_default_enum_member=config.set_default_enum_member,
-            use_subclass_enum=config.use_subclass_enum,
-            strict_nullable=config.strict_nullable,
-            use_generic_container_types=config.use_generic_container_types,
-            enable_faux_immutability=config.enable_faux_immutability,
-            disable_appending_item_suffix=config.disable_appending_item_suffix,
-            strict_types=config.strict_types,
-            empty_enum_field_name=config.empty_enum_field_name,
-            field_extra_keys=config.field_extra_keys,
-            field_include_all_keys=config.field_include_all_keys,
-            field_extra_keys_without_x_prefix=config.field_extra_keys_without_x_prefix,
-            openapi_scopes=config.openapi_scopes,
-            wrap_string_literal=config.wrap_string_literal,
-            use_title_as_name=config.use_title_as_name,
-            use_operation_id_as_name=config.use_operation_id_as_name,
-            use_unique_items_as_set=config.use_unique_items_as_set,
-            http_headers=config.http_headers,
-            http_ignore_tls=config.http_ignore_tls,
-            use_annotated=config.use_annotated,
-            use_non_positive_negative_number_constrained_types=config.use_non_positive_negative_number_constrained_types,
-            original_field_name_delimiter=config.original_field_name_delimiter,
-            use_double_quotes=config.use_double_quotes,
-            collapse_root_models=config.collapse_root_models,
-            use_union_operator=config.use_union_operator,
-            special_field_name_prefix=config.special_field_name_prefix,
-            remove_special_field_name_prefix=config.remove_special_field_name_prefix,
-            capitalise_enum_members=config.capitalise_enum_members,
-            keep_model_order=config.keep_model_order,
-            custom_file_header=config.custom_file_header,
-            custom_file_header_path=config.custom_file_header_path,
-            custom_formatters=config.custom_formatters,
-            custom_formatters_kwargs=custom_formatters_kwargs,
-            use_pendulum=config.use_pendulum,
-            http_query_parameters=config.http_query_parameters,
-            treat_dots_as_module=config.treat_dot_as_module,
-            use_exact_imports=config.use_exact_imports,
-            union_mode=config.union_mode,
-            output_datetime_class=config.output_datetime_class,
-            keyword_only=config.keyword_only,
-            no_alias=config.no_alias,
-        )
-        return Exit.OK
-    except InvalidClassNameError as e:
-        print(f'{e} You have to set `--class-name` option', file=sys.stderr)
-        return Exit.ERROR
-    except Error as e:
-        print(str(e), file=sys.stderr)
-        return Exit.ERROR
-    except Exception:
-        import traceback
-
-        print(traceback.format_exc(), file=sys.stderr)
-        return Exit.ERROR
-
-
-if __name__ == '__main__':
-    sys.exit(main())
diff -pruN 0.26.4-3/datamodel_code_generator/arguments.py 0.45.0-1/datamodel_code_generator/arguments.py
--- 0.26.4-3/datamodel_code_generator/arguments.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.45.0-1/datamodel_code_generator/arguments.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,526 +0,0 @@
-from __future__ import annotations
-
-import locale
-from argparse import ArgumentParser, FileType, HelpFormatter, Namespace
-from operator import attrgetter
-from typing import TYPE_CHECKING
-
-from datamodel_code_generator import DataModelType, InputFileType, OpenAPIScope
-from datamodel_code_generator.format import DatetimeClassType, PythonVersion
-from datamodel_code_generator.model.pydantic_v2 import UnionMode
-from datamodel_code_generator.parser import LiteralType
-from datamodel_code_generator.types import StrictTypes
-
-if TYPE_CHECKING:
-    from argparse import Action
-    from typing import Iterable, Optional
-
-DEFAULT_ENCODING = locale.getpreferredencoding()
-
-namespace = Namespace(no_color=False)
-
-
-class SortingHelpFormatter(HelpFormatter):
-    def _bold_cyan(self, text: str) -> str:
-        return f'\x1b[36;1m{text}\x1b[0m'
-
-    def add_arguments(self, actions: Iterable[Action]) -> None:
-        actions = sorted(actions, key=attrgetter('option_strings'))
-        super().add_arguments(actions)
-
-    def start_section(self, heading: Optional[str]) -> None:
-        return super().start_section(
-            heading if namespace.no_color or not heading else self._bold_cyan(heading)
-        )
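This formatter is why the `--help` text earlier in this diff lists flags alphabetically inside each group; the core of it is just a sort key on `option_strings`:

```python
from operator import attrgetter

class FakeAction:  # invented stand-in for argparse.Action
    def __init__(self, option_strings):
        self.option_strings = option_strings

actions = [FakeAction(["--url"]), FakeAction(["--input"]), FakeAction(["--output"])]
print([a.option_strings[0] for a in sorted(actions, key=attrgetter("option_strings"))])
# -> ['--input', '--output', '--url']
```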
-
-
-arg_parser = ArgumentParser(
-    usage='\n  datamodel-codegen [options]',
-    description='Generate Python data models from schema definitions or structured data',
-    formatter_class=SortingHelpFormatter,
-    add_help=False,
-)
-
-base_options = arg_parser.add_argument_group('Options')
-typing_options = arg_parser.add_argument_group('Typing customization')
-field_options = arg_parser.add_argument_group('Field customization')
-model_options = arg_parser.add_argument_group('Model customization')
-template_options = arg_parser.add_argument_group('Template customization')
-openapi_options = arg_parser.add_argument_group('OpenAPI-only options')
-general_options = arg_parser.add_argument_group('General options')
-
-# ======================================================================================
-# Base options for input/output
-# ======================================================================================
-base_options.add_argument(
-    '--http-headers',
-    nargs='+',
-    metavar='HTTP_HEADER',
-    help='Set headers in HTTP requests to the remote host. (example: "Authorization: Basic dXNlcjpwYXNz")',
-)
-base_options.add_argument(
-    '--http-query-parameters',
-    nargs='+',
-    metavar='HTTP_QUERY_PARAMETERS',
-    help='Set query parameters in HTTP requests to the remote host. (example: "ref=branch")',
-)
-base_options.add_argument(
-    '--http-ignore-tls',
-    help="Disable verification of the remote host's TLS certificate",
-    action='store_true',
-    default=None,
-)
-base_options.add_argument(
-    '--input',
-    help='Input file/directory (default: stdin)',
-)
-base_options.add_argument(
-    '--input-file-type',
-    help='Input file type (default: auto)',
-    choices=[i.value for i in InputFileType],
-)
-base_options.add_argument(
-    '--output',
-    help='Output file (default: stdout)',
-)
-base_options.add_argument(
-    '--output-model-type',
-    help='Output model type (default: pydantic.BaseModel)',
-    choices=[i.value for i in DataModelType],
-)
-base_options.add_argument(
-    '--url',
-    help='Input file URL. `--input` is ignored when `--url` is used',
-)
-
-# ======================================================================================
-# Customization options for generated models
-# ======================================================================================
-model_options.add_argument(
-    '--allow-extra-fields',
-    help='Allow extra fields to be passed; if this flag is not set, extra fields are forbidden.',
-    action='store_true',
-    default=None,
-)
-model_options.add_argument(
-    '--allow-population-by-field-name',
-    help='Allow population by field name',
-    action='store_true',
-    default=None,
-)
-model_options.add_argument(
-    '--class-name',
-    help='Set class name of root model',
-    default=None,
-)
-model_options.add_argument(
-    '--collapse-root-models',
-    action='store_true',
-    default=None,
-    help='Models generated with a root-type field will be merged '
-    'into the models using that root-type model',
-)
-model_options.add_argument(
-    '--disable-appending-item-suffix',
-    help='Disable appending `Item` suffix to model name in an array',
-    action='store_true',
-    default=None,
-)
-model_options.add_argument(
-    '--disable-timestamp',
-    help='Disable timestamp on file headers',
-    action='store_true',
-    default=None,
-)
-model_options.add_argument(
-    '--enable-faux-immutability',
-    help='Enable faux immutability',
-    action='store_true',
-    default=None,
-)
-model_options.add_argument(
-    '--enable-version-header',
-    help='Enable package version on file headers',
-    action='store_true',
-    default=None,
-)
-model_options.add_argument(
-    '--keep-model-order',
-    help="Keep generated models' order",
-    action='store_true',
-    default=None,
-)
-model_options.add_argument(
-    '--keyword-only',
-    help='Define models as keyword-only (for example dataclass(kw_only=True)).',
-    action='store_true',
-    default=None,
-)
-model_options.add_argument(
-    '--reuse-model',
-    help='Reuse an existing model for a field when the module already has a model with the same content',
-    action='store_true',
-    default=None,
-)
-model_options.add_argument(
-    '--target-python-version',
-    help='Target Python version (default: 3.8)',
-    choices=[v.value for v in PythonVersion],
-)
-model_options.add_argument(
-    '--treat-dot-as-module',
-    help='treat dotted module names as modules',
-    action='store_true',
-    default=False,
-)
-model_options.add_argument(
-    '--use-schema-description',
-    help='Use schema description to populate class docstring',
-    action='store_true',
-    default=None,
-)
-model_options.add_argument(
-    '--use-title-as-name',
-    help='use titles as class names of models',
-    action='store_true',
-    default=None,
-)
-model_options.add_argument(
-    '--use-pendulum',
-    help='use pendulum instead of datetime',
-    action='store_true',
-    default=False,
-)
-model_options.add_argument(
-    '--use-exact-imports',
-    help='import exact types instead of modules, for example: "from .foo import Bar" instead of '
-    '"from . import foo" with "foo.Bar"',
-    action='store_true',
-    default=False,
-)
-model_options.add_argument(
-    '--output-datetime-class',
-    help='Choose the datetime class from AwareDatetime, NaiveDatetime or datetime. '
-    'Each output model type has its own default mapping (for example pydantic: datetime, dataclass: str, ...)',
-    choices=[i.value for i in DatetimeClassType],
-    default=None,
-)
-
-# ======================================================================================
-# Typing options for generated models
-# ======================================================================================
-typing_options.add_argument(
-    '--base-class',
-    help='Base Class (default: pydantic.BaseModel)',
-    type=str,
-)
-typing_options.add_argument(
-    '--enum-field-as-literal',
-    help='Parse enum fields as literals. '
-    'all: all enum field types are Literal. '
-    'one: the field type is Literal when an enum has only one possible value',
-    choices=[lt.value for lt in LiteralType],
-    default=None,
-)
-typing_options.add_argument(
-    '--field-constraints',
-    help='Use field constraints and not con* annotations',
-    action='store_true',
-    default=None,
-)
-typing_options.add_argument(
-    '--set-default-enum-member',
-    help='Set enum members as default values for enum fields',
-    action='store_true',
-    default=None,
-)
-typing_options.add_argument(
-    '--strict-types',
-    help='Use strict types',
-    choices=[t.value for t in StrictTypes],
-    nargs='+',
-)
-typing_options.add_argument(
-    '--use-annotated',
-    help='Use typing.Annotated for Field(). Also, `--field-constraints` option will be enabled.',
-    action='store_true',
-    default=None,
-)
-typing_options.add_argument(
-    '--use-generic-container-types',
-    help='Use generic container types for type hinting (typing.Sequence, typing.Mapping). '
-    'If `--use-standard-collections` option is set, then import from collections.abc instead of typing',
-    action='store_true',
-    default=None,
-)
-typing_options.add_argument(
-    '--use-non-positive-negative-number-constrained-types',
-    help='Use the Non{Positive,Negative}{Float,Int} types instead of the corresponding con* constrained types.',
-    action='store_true',
-    default=None,
-)
-typing_options.add_argument(
-    '--use-one-literal-as-default',
-    help='Use the literal as the default value when a Literal field has only one possible value',
-    action='store_true',
-    default=None,
-)
-typing_options.add_argument(
-    '--use-standard-collections',
-    help='Use standard collections for type hinting (list, dict)',
-    action='store_true',
-    default=None,
-)
-typing_options.add_argument(
-    '--use-subclass-enum',
-    help='Define the Enum class as a subclass of the field type when the enum has a type (int, float, bytes, str)',
-    action='store_true',
-    default=None,
-)
-typing_options.add_argument(
-    '--use-union-operator',
-    help='Use | operator for Union type (PEP 604).',
-    action='store_true',
-    default=None,
-)
-typing_options.add_argument(
-    '--use-unique-items-as-set',
-    help='define field type as `set` when the field attribute has `uniqueItems`',
-    action='store_true',
-    default=None,
-)
-
-# ======================================================================================
-# Customization options for generated model fields
-# ======================================================================================
-field_options.add_argument(
-    '--capitalise-enum-members',
-    '--capitalize-enum-members',
-    help='Capitalize field names on enum',
-    action='store_true',
-    default=None,
-)
-field_options.add_argument(
-    '--empty-enum-field-name',
-    help='Set the field name used when an enum value is empty (default: `_`)',
-    default=None,
-)
-field_options.add_argument(
-    '--field-extra-keys',
-    help='Add extra keys to field parameters',
-    type=str,
-    nargs='+',
-)
-field_options.add_argument(
-    '--field-extra-keys-without-x-prefix',
-    help='Add extra keys with `x-` prefix to field parameters. The extra keys are stripped of the `x-` prefix.',
-    type=str,
-    nargs='+',
-)
-field_options.add_argument(
-    '--field-include-all-keys',
-    help='Add all keys to field parameters',
-    action='store_true',
-    default=None,
-)
-field_options.add_argument(
-    '--force-optional',
-    help='Force optional for required fields',
-    action='store_true',
-    default=None,
-)
-field_options.add_argument(
-    '--original-field-name-delimiter',
-    help='Set the delimiter used when converting to snake case. This option can only be used with --snake-case-field (default: `_`)',
-    default=None,
-)
-field_options.add_argument(
-    '--remove-special-field-name-prefix',
-    help='Remove the field name prefix when it has a special meaning, e.g. leading underscores',
-    action='store_true',
-    default=None,
-)
-field_options.add_argument(
-    '--snake-case-field',
-    help='Change camel-case field name to snake-case',
-    action='store_true',
-    default=None,
-)
-field_options.add_argument(
-    '--special-field-name-prefix',
-    help="Set field name prefix when first character can't be used as Python field name (default:  `field`)",
-    default=None,
-)
-field_options.add_argument(
-    '--strip-default-none',
-    help='Strip default None on fields',
-    action='store_true',
-    default=None,
-)
-field_options.add_argument(
-    '--use-default',
-    help='Use default value even if a field is required',
-    action='store_true',
-    default=None,
-)
-field_options.add_argument(
-    '--use-default-kwarg',
-    action='store_true',
-    help='Use `default=` instead of a positional argument for Fields that have default values.',
-    default=None,
-)
-field_options.add_argument(
-    '--use-field-description',
-    help='Use schema description to populate field docstring',
-    action='store_true',
-    default=None,
-)
-field_options.add_argument(
-    '--union-mode',
-    help='Union mode for fields (pydantic v2 only)',
-    choices=[u.value for u in UnionMode],
-    default=None,
-)
-field_options.add_argument(
-    '--no-alias',
-    help="""Do not add a field alias. E.g., if --snake-case-field is used along with a base class, which has an 
-            alias_generator""",
-    action='store_true',
-    default=None,
-)
-
-# ======================================================================================
-# Options for templating output
-# ======================================================================================
-template_options.add_argument(
-    '--aliases',
-    help='Alias mapping file',
-    type=FileType('rt'),
-)
-template_options.add_argument(
-    '--custom-file-header',
-    help='Custom file header',
-    type=str,
-    default=None,
-)
-template_options.add_argument(
-    '--custom-file-header-path',
-    help='Custom file header file path',
-    default=None,
-    type=str,
-)
-template_options.add_argument(
-    '--custom-template-dir',
-    help='Custom template directory',
-    type=str,
-)
-template_options.add_argument(
-    '--encoding',
-    help=f'The encoding of input and output (default: {DEFAULT_ENCODING})',
-    default=None,
-)
-template_options.add_argument(
-    '--extra-template-data',
-    help='Extra template data',
-    type=FileType('rt'),
-)
-template_options.add_argument(
-    '--use-double-quotes',
-    action='store_true',
-    default=None,
-    help='Generate models with double quotes. Without this option, single quotes or '
-    'your black skip_string_normalization config value will be used.',
-)
-template_options.add_argument(
-    '--wrap-string-literal',
-    help='Wrap string literals using the black `experimental-string-processing` option (requires black 20.8b0 or later)',
-    action='store_true',
-    default=None,
-)
-base_options.add_argument(
-    '--additional-imports',
-    help='Custom imports for output (delimited list input). For example "datetime.date,datetime.datetime"',
-    type=str,
-    default=None,
-)
-base_options.add_argument(
-    '--custom-formatters',
-    help='List of modules with custom formatters (delimited list input).',
-    type=str,
-    default=None,
-)
-template_options.add_argument(
-    '--custom-formatters-kwargs',
-    help='A file with kwargs for custom formatters.',
-    type=FileType('rt'),
-)
-
-# ======================================================================================
-# Options specific to OpenAPI input schemas
-# ======================================================================================
-openapi_options.add_argument(
-    '--openapi-scopes',
-    help='Scopes of OpenAPI model generation (default: schemas)',
-    choices=[o.value for o in OpenAPIScope],
-    nargs='+',
-    default=None,
-)
-openapi_options.add_argument(
-    '--strict-nullable',
-    help='Treat a field with a default value as non-nullable (OpenAPI only)',
-    action='store_true',
-    default=None,
-)
-openapi_options.add_argument(
-    '--use-operation-id-as-name',
-    help='use OpenAPI operation ids as class names of models',
-    action='store_true',
-    default=None,
-)
-openapi_options.add_argument(
-    '--validation',
-    help='Deprecated: Enable validation (OpenAPI only). This option is deprecated '
-    'and will be removed in a future release',
-    action='store_true',
-    default=None,
-)
-
-# ======================================================================================
-# General options
-# ======================================================================================
-general_options.add_argument(
-    '--debug',
-    help='show debug messages (requires the "debug" extra: `$ pip install \'datamodel-code-generator[debug]\'`)',
-    action='store_true',
-    default=None,
-)
-general_options.add_argument(
-    '--disable-warnings',
-    help='disable warnings',
-    action='store_true',
-    default=None,
-)
-general_options.add_argument(
-    '-h',
-    '--help',
-    action='help',
-    default='==SUPPRESS==',
-    help='show this help message and exit',
-)
-general_options.add_argument(
-    '--no-color',
-    action='store_true',
-    default=False,
-    help='disable colorized output',
-)
-general_options.add_argument(
-    '--version',
-    action='store_true',
-    help='show version',
-)
-
-__all__ = [
-    'arg_parser',
-    'DEFAULT_ENCODING',
-    'namespace',
-]
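
Nearly every flag in the removed `arguments.py` defaults to `None` rather than `False`, which lets the caller distinguish "not passed on the command line" from "explicitly disabled" and fill the gaps from a config file. A minimal sketch of that merge against the 0.26.4 module above (the config values are hypothetical stand-ins for a parsed `[tool.datamodel-codegen]` table):

    from datamodel_code_generator.arguments import arg_parser, namespace

    args = arg_parser.parse_args(['--input', 'api.yaml', '--snake-case-field'], namespace=namespace)

    config = {'snake_case_field': False, 'use_annotated': True}  # hypothetical pyproject values

    merged = {
        key: value if value is not None else config.get(key)
        for key, value in vars(args).items()
    }
    print(merged['snake_case_field'])  # True -- the explicit CLI flag wins
    print(merged['use_annotated'])     # True -- filled in from config
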
diff -pruN 0.26.4-3/datamodel_code_generator/format.py 0.45.0-1/datamodel_code_generator/format.py
--- 0.26.4-3/datamodel_code_generator/format.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.45.0-1/datamodel_code_generator/format.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,277 +0,0 @@
-from __future__ import annotations
-
-from enum import Enum
-from importlib import import_module
-from pathlib import Path
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence
-from warnings import warn
-
-import black
-import isort
-
-from datamodel_code_generator.util import cached_property, load_toml
-
-try:
-    import black.mode
-except ImportError:  # pragma: no cover
-    black.mode = None
-
-
-class DatetimeClassType(Enum):
-    Datetime = 'datetime'
-    Awaredatetime = 'AwareDatetime'
-    Naivedatetime = 'NaiveDatetime'
-
-
-class PythonVersion(Enum):
-    PY_36 = '3.6'
-    PY_37 = '3.7'
-    PY_38 = '3.8'
-    PY_39 = '3.9'
-    PY_310 = '3.10'
-    PY_311 = '3.11'
-    PY_312 = '3.12'
-    PY_313 = '3.13'
-
-    @cached_property
-    def _is_py_38_or_later(self) -> bool:  # pragma: no cover
-        return self.value not in {self.PY_36.value, self.PY_37.value}  # type: ignore
-
-    @cached_property
-    def _is_py_39_or_later(self) -> bool:  # pragma: no cover
-        return self.value not in {self.PY_36.value, self.PY_37.value, self.PY_38.value}  # type: ignore
-
-    @cached_property
-    def _is_py_310_or_later(self) -> bool:  # pragma: no cover
-        return self.value not in {
-            self.PY_36.value,
-            self.PY_37.value,
-            self.PY_38.value,
-            self.PY_39.value,
-        }  # type: ignore
-
-    @cached_property
-    def _is_py_311_or_later(self) -> bool:  # pragma: no cover
-        return self.value not in {
-            self.PY_36.value,
-            self.PY_37.value,
-            self.PY_38.value,
-            self.PY_39.value,
-            self.PY_310.value,
-        }  # type: ignore
-
-    @property
-    def has_literal_type(self) -> bool:
-        return self._is_py_38_or_later
-
-    @property
-    def has_union_operator(self) -> bool:  # pragma: no cover
-        return self._is_py_310_or_later
-
-    @property
-    def has_annotated_type(self) -> bool:
-        return self._is_py_39_or_later
-
-    @property
-    def has_typed_dict(self) -> bool:
-        return self._is_py_38_or_later
-
-    @property
-    def has_typed_dict_non_required(self) -> bool:
-        return self._is_py_311_or_later
-
-    @property
-    def has_kw_only_dataclass(self) -> bool:
-        return self._is_py_310_or_later
-
-
-if TYPE_CHECKING:
-
-    class _TargetVersion(Enum): ...
-
-    BLACK_PYTHON_VERSION: Dict[PythonVersion, _TargetVersion]
-else:
-    BLACK_PYTHON_VERSION: Dict[PythonVersion, black.TargetVersion] = {
-        v: getattr(black.TargetVersion, f'PY{v.name.split("_")[-1]}')
-        for v in PythonVersion
-        if hasattr(black.TargetVersion, f'PY{v.name.split("_")[-1]}')
-    }
-
-
-def is_supported_in_black(python_version: PythonVersion) -> bool:  # pragma: no cover
-    return python_version in BLACK_PYTHON_VERSION
-
-
-def black_find_project_root(sources: Sequence[Path]) -> Path:
-    if TYPE_CHECKING:
-        from typing import Iterable, Tuple, Union
-
-        def _find_project_root(
-            srcs: Union[Sequence[str], Iterable[str]],
-        ) -> Union[Tuple[Path, str], Path]: ...
-
-    else:
-        from black import find_project_root as _find_project_root
-    project_root = _find_project_root(tuple(str(s) for s in sources))
-    if isinstance(project_root, tuple):
-        return project_root[0]
-    else:  # pragma: no cover
-        return project_root
-
-
-class CodeFormatter:
-    def __init__(
-        self,
-        python_version: PythonVersion,
-        settings_path: Optional[Path] = None,
-        wrap_string_literal: Optional[bool] = None,
-        skip_string_normalization: bool = True,
-        known_third_party: Optional[List[str]] = None,
-        custom_formatters: Optional[List[str]] = None,
-        custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
-    ) -> None:
-        if not settings_path:
-            settings_path = Path().resolve()
-
-        root = black_find_project_root((settings_path,))
-        path = root / 'pyproject.toml'
-        if path.is_file():
-            pyproject_toml = load_toml(path)
-            config = pyproject_toml.get('tool', {}).get('black', {})
-        else:
-            config = {}
-
-        black_kwargs: Dict[str, Any] = {}
-        if wrap_string_literal is not None:
-            experimental_string_processing = wrap_string_literal
-        else:
-            if black.__version__ < '24.1.0':  # type: ignore
-                experimental_string_processing = config.get(
-                    'experimental-string-processing'
-                )
-            else:
-                experimental_string_processing = config.get(
-                    'preview', False
-                ) and (  # pragma: no cover
-                    config.get('unstable', False)
-                    or 'string_processing' in config.get('enable-unstable-feature', [])
-                )
-
-        if experimental_string_processing is not None:  # pragma: no cover
-            if black.__version__.startswith('19.'):  # type: ignore
-                warn(
-                    f"black doesn't support `experimental-string-processing` option"  # type: ignore
-                    f' for wrapping string literal in {black.__version__}'
-                )
-            elif black.__version__ < '24.1.0':  # type: ignore
-                black_kwargs['experimental_string_processing'] = (
-                    experimental_string_processing
-                )
-            elif experimental_string_processing:
-                black_kwargs['preview'] = True
-                black_kwargs['unstable'] = config.get('unstable', False)
-                black_kwargs['enabled_features'] = {
-                    black.mode.Preview.string_processing
-                }
-
-        if TYPE_CHECKING:
-            self.black_mode: black.FileMode
-        else:
-            self.black_mode = black.FileMode(
-                target_versions={BLACK_PYTHON_VERSION[python_version]},
-                line_length=config.get('line-length', black.DEFAULT_LINE_LENGTH),
-                string_normalization=not skip_string_normalization
-                or not config.get('skip-string-normalization', True),
-                **black_kwargs,
-            )
-
-        self.settings_path: str = str(settings_path)
-
-        self.isort_config_kwargs: Dict[str, Any] = {}
-        if known_third_party:
-            self.isort_config_kwargs['known_third_party'] = known_third_party
-
-        if isort.__version__.startswith('4.'):
-            self.isort_config = None
-        else:
-            self.isort_config = isort.Config(
-                settings_path=self.settings_path, **self.isort_config_kwargs
-            )
-
-        self.custom_formatters_kwargs = custom_formatters_kwargs or {}
-        self.custom_formatters = self._check_custom_formatters(custom_formatters)
-
-    def _load_custom_formatter(
-        self, custom_formatter_import: str
-    ) -> CustomCodeFormatter:
-        import_ = import_module(custom_formatter_import)
-
-        if not hasattr(import_, 'CodeFormatter'):
-            raise NameError(
-                f'Custom formatter module `{import_.__name__}` must contain an object named `CodeFormatter`'
-            )
-
-        formatter_class = import_.__getattribute__('CodeFormatter')
-
-        if not issubclass(formatter_class, CustomCodeFormatter):
-            raise TypeError(
-                f'The `CodeFormatter` class in module {custom_formatter_import} must inherit from `CustomCodeFormatter`'
-            )
-
-        return formatter_class(formatter_kwargs=self.custom_formatters_kwargs)
-
-    def _check_custom_formatters(
-        self, custom_formatters: Optional[List[str]]
-    ) -> List[CustomCodeFormatter]:
-        if custom_formatters is None:
-            return []
-
-        return [
-            self._load_custom_formatter(custom_formatter_import)
-            for custom_formatter_import in custom_formatters
-        ]
-
-    def format_code(
-        self,
-        code: str,
-    ) -> str:
-        code = self.apply_isort(code)
-        code = self.apply_black(code)
-
-        for formatter in self.custom_formatters:
-            code = formatter.apply(code)
-
-        return code
-
-    def apply_black(self, code: str) -> str:
-        return black.format_str(
-            code,
-            mode=self.black_mode,
-        )
-
-    if TYPE_CHECKING:
-
-        def apply_isort(self, code: str) -> str: ...
-
-    else:
-        if isort.__version__.startswith('4.'):
-
-            def apply_isort(self, code: str) -> str:
-                return isort.SortImports(
-                    file_contents=code,
-                    settings_path=self.settings_path,
-                    **self.isort_config_kwargs,
-                ).output
-
-        else:
-
-            def apply_isort(self, code: str) -> str:
-                return isort.code(code, config=self.isort_config)
-
-
-class CustomCodeFormatter:
-    def __init__(self, formatter_kwargs: Dict[str, Any]) -> None:
-        self.formatter_kwargs = formatter_kwargs
-
-    def apply(self, code: str) -> str:
-        raise NotImplementedError
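
The loader contract in `_load_custom_formatter` above means a custom formatter module must expose a class literally named `CodeFormatter` that subclasses `CustomCodeFormatter`; it is instantiated with the kwargs read via `--custom-formatters-kwargs` and its `apply` is called on the generated source. A minimal example module (the module name and header text are illustrative):

    # my_formatters/license_header.py
    from datamodel_code_generator.format import CustomCodeFormatter

    class CodeFormatter(CustomCodeFormatter):
        """Prepend a license header taken from the formatter kwargs."""

        def apply(self, code: str) -> str:
            header = self.formatter_kwargs.get('header', '# (c) Example Corp')
            return f'{header}\n{code}'

It would then be enabled with `--custom-formatters my_formatters.license_header`.
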
diff -pruN 0.26.4-3/datamodel_code_generator/http.py 0.45.0-1/datamodel_code_generator/http.py
--- 0.26.4-3/datamodel_code_generator/http.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.45.0-1/datamodel_code_generator/http.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,29 +0,0 @@
-from __future__ import annotations
-
-from typing import Optional, Sequence, Tuple
-
-try:
-    import httpx
-except ImportError:  # pragma: no cover
-    raise Exception(
-        "Please run `$pip install 'datamodel-code-generator[http]`' to resolve URL Reference"
-    )
-
-
-def get_body(
-    url: str,
-    headers: Optional[Sequence[Tuple[str, str]]] = None,
-    ignore_tls: bool = False,
-    query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
-) -> str:
-    return httpx.get(
-        url,
-        headers=headers,
-        verify=not ignore_tls,
-        follow_redirects=True,
-        params=query_parameters,
-    ).text
-
-
-def join_url(url: str, ref: str = '.') -> str:
-    return str(httpx.URL(url).join(ref))
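
`join_url` above simply delegates to `httpx.URL.join`, which resolves a relative `$ref` against the document URL the way a browser resolves a relative link:

    import httpx

    base = 'https://example.com/specs/openapi.yaml'
    print(str(httpx.URL(base).join('./schemas/pet.yaml')))
    # https://example.com/specs/schemas/pet.yaml
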
diff -pruN 0.26.4-3/datamodel_code_generator/imports.py 0.45.0-1/datamodel_code_generator/imports.py
--- 0.26.4-3/datamodel_code_generator/imports.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.45.0-1/datamodel_code_generator/imports.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,127 +0,0 @@
-from __future__ import annotations
-
-from collections import defaultdict
-from functools import lru_cache
-from typing import DefaultDict, Dict, Iterable, List, Optional, Set, Tuple, Union
-
-from datamodel_code_generator.util import BaseModel
-
-
-class Import(BaseModel):
-    from_: Optional[str] = None
-    import_: str
-    alias: Optional[str] = None
-    reference_path: Optional[str] = None
-
-    @classmethod
-    @lru_cache()
-    def from_full_path(cls, class_path: str) -> Import:
-        split_class_path: List[str] = class_path.split('.')
-        return Import(
-            from_='.'.join(split_class_path[:-1]) or None, import_=split_class_path[-1]
-        )
-
-
-class Imports(DefaultDict[Optional[str], Set[str]]):
-    def __str__(self) -> str:
-        return self.dump()
-
-    def __init__(self, use_exact: bool = False) -> None:
-        super().__init__(set)
-        self.alias: DefaultDict[Optional[str], Dict[str, str]] = defaultdict(dict)
-        self.counter: Dict[Tuple[Optional[str], str], int] = defaultdict(int)
-        self.reference_paths: Dict[str, Import] = {}
-        self.use_exact: bool = use_exact
-
-    def _set_alias(self, from_: Optional[str], imports: Set[str]) -> List[str]:
-        return [
-            f'{i} as {self.alias[from_][i]}'
-            if i in self.alias[from_] and i != self.alias[from_][i]
-            else i
-            for i in sorted(imports)
-        ]
-
-    def create_line(self, from_: Optional[str], imports: Set[str]) -> str:
-        if from_:
-            return f"from {from_} import {', '.join(self._set_alias(from_, imports))}"
-        return '\n'.join(f'import {i}' for i in self._set_alias(from_, imports))
-
-    def dump(self) -> str:
-        return '\n'.join(
-            self.create_line(from_, imports) for from_, imports in self.items()
-        )
-
-    def append(self, imports: Union[Import, Iterable[Import], None]) -> None:
-        if imports:
-            if isinstance(imports, Import):
-                imports = [imports]
-            for import_ in imports:
-                if import_.reference_path:
-                    self.reference_paths[import_.reference_path] = import_
-                if '.' in import_.import_:
-                    self[None].add(import_.import_)
-                    self.counter[(None, import_.import_)] += 1
-                else:
-                    self[import_.from_].add(import_.import_)
-                    self.counter[(import_.from_, import_.import_)] += 1
-                    if import_.alias:
-                        self.alias[import_.from_][import_.import_] = import_.alias
-
-    def remove(self, imports: Union[Import, Iterable[Import]]) -> None:
-        if isinstance(imports, Import):  # pragma: no cover
-            imports = [imports]
-        for import_ in imports:
-            if '.' in import_.import_:  # pragma: no cover
-                self.counter[(None, import_.import_)] -= 1
-                if self.counter[(None, import_.import_)] == 0:  # pragma: no cover
-                    self[None].remove(import_.import_)
-                    if not self[None]:
-                        del self[None]
-            else:
-                self.counter[(import_.from_, import_.import_)] -= 1  # pragma: no cover
-                if (
-                    self.counter[(import_.from_, import_.import_)] == 0
-                ):  # pragma: no cover
-                    self[import_.from_].remove(import_.import_)
-                    if not self[import_.from_]:
-                        del self[import_.from_]
-                    if import_.alias:  # pragma: no cover
-                        del self.alias[import_.from_][import_.import_]
-                        if not self.alias[import_.from_]:
-                            del self.alias[import_.from_]
-
-    def remove_referenced_imports(self, reference_path: str) -> None:
-        if reference_path in self.reference_paths:
-            self.remove(self.reference_paths[reference_path])
-
-
-IMPORT_ANNOTATED = Import.from_full_path('typing.Annotated')
-IMPORT_ANNOTATED_BACKPORT = Import.from_full_path('typing_extensions.Annotated')
-IMPORT_ANY = Import.from_full_path('typing.Any')
-IMPORT_LIST = Import.from_full_path('typing.List')
-IMPORT_SET = Import.from_full_path('typing.Set')
-IMPORT_UNION = Import.from_full_path('typing.Union')
-IMPORT_OPTIONAL = Import.from_full_path('typing.Optional')
-IMPORT_LITERAL = Import.from_full_path('typing.Literal')
-IMPORT_TYPE_ALIAS = Import.from_full_path('typing.TypeAlias')
-IMPORT_LITERAL_BACKPORT = Import.from_full_path('typing_extensions.Literal')
-IMPORT_SEQUENCE = Import.from_full_path('typing.Sequence')
-IMPORT_FROZEN_SET = Import.from_full_path('typing.FrozenSet')
-IMPORT_MAPPING = Import.from_full_path('typing.Mapping')
-IMPORT_ABC_SEQUENCE = Import.from_full_path('collections.abc.Sequence')
-IMPORT_ABC_SET = Import.from_full_path('collections.abc.Set')
-IMPORT_ABC_MAPPING = Import.from_full_path('collections.abc.Mapping')
-IMPORT_ENUM = Import.from_full_path('enum.Enum')
-IMPORT_ANNOTATIONS = Import.from_full_path('__future__.annotations')
-IMPORT_DICT = Import.from_full_path('typing.Dict')
-IMPORT_DECIMAL = Import.from_full_path('decimal.Decimal')
-IMPORT_DATE = Import.from_full_path('datetime.date')
-IMPORT_DATETIME = Import.from_full_path('datetime.datetime')
-IMPORT_TIMEDELTA = Import.from_full_path('datetime.timedelta')
-IMPORT_PATH = Import.from_full_path('pathlib.Path')
-IMPORT_TIME = Import.from_full_path('datetime.time')
-IMPORT_UUID = Import.from_full_path('uuid.UUID')
-IMPORT_PENDULUM_DATE = Import.from_full_path('pendulum.Date')
-IMPORT_PENDULUM_DATETIME = Import.from_full_path('pendulum.DateTime')
-IMPORT_PENDULUM_DURATION = Import.from_full_path('pendulum.Duration')
-IMPORT_PENDULUM_TIME = Import.from_full_path('pendulum.Time')
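
The `Imports` container above groups names by their source module, keeps a per-import counter so `remove` only drops a name once its last user is gone, and renders aliases on dump. A short demonstration:

    from datamodel_code_generator.imports import Import, Imports

    imports = Imports()
    imports.append(Import.from_full_path('typing.Optional'))
    imports.append(Import.from_full_path('typing.List'))
    imports.append(Import(from_='numpy', import_='ndarray', alias='NDArray'))
    print(imports.dump())
    # from typing import List, Optional
    # from numpy import ndarray as NDArray
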
diff -pruN 0.26.4-3/datamodel_code_generator/model/__init__.py 0.45.0-1/datamodel_code_generator/model/__init__.py
--- 0.26.4-3/datamodel_code_generator/model/__init__.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.45.0-1/datamodel_code_generator/model/__init__.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,91 +0,0 @@
-from __future__ import annotations
-
-import sys
-from typing import TYPE_CHECKING, Callable, Iterable, List, NamedTuple, Optional, Type
-
-from .. import DatetimeClassType, PythonVersion
-from ..types import DataTypeManager as DataTypeManagerABC
-from .base import ConstraintsBase, DataModel, DataModelFieldBase
-
-if TYPE_CHECKING:
-    from .. import DataModelType
-
-DEFAULT_TARGET_DATETIME_CLASS = DatetimeClassType.Datetime
-DEFAULT_TARGET_PYTHON_VERSION = PythonVersion(
-    f'{sys.version_info.major}.{sys.version_info.minor}'
-)
-
-
-class DataModelSet(NamedTuple):
-    data_model: Type[DataModel]
-    root_model: Type[DataModel]
-    field_model: Type[DataModelFieldBase]
-    data_type_manager: Type[DataTypeManagerABC]
-    dump_resolve_reference_action: Optional[Callable[[Iterable[str]], str]]
-    known_third_party: Optional[List[str]] = None
-
-
-def get_data_model_types(
-    data_model_type: DataModelType,
-    target_python_version: PythonVersion = DEFAULT_TARGET_PYTHON_VERSION,
-    target_datetime_class: DatetimeClassType = DEFAULT_TARGET_DATETIME_CLASS,
-) -> DataModelSet:
-    from .. import DataModelType
-    from . import dataclass, msgspec, pydantic, pydantic_v2, rootmodel, typed_dict
-    from .types import DataTypeManager
-
-    if data_model_type == DataModelType.PydanticBaseModel:
-        return DataModelSet(
-            data_model=pydantic.BaseModel,
-            root_model=pydantic.CustomRootType,
-            field_model=pydantic.DataModelField,
-            data_type_manager=pydantic.DataTypeManager,
-            dump_resolve_reference_action=pydantic.dump_resolve_reference_action,
-        )
-    elif data_model_type == DataModelType.PydanticV2BaseModel:
-        return DataModelSet(
-            data_model=pydantic_v2.BaseModel,
-            root_model=pydantic_v2.RootModel,
-            field_model=pydantic_v2.DataModelField,
-            data_type_manager=pydantic_v2.DataTypeManager,
-            dump_resolve_reference_action=pydantic_v2.dump_resolve_reference_action,
-        )
-    elif data_model_type == DataModelType.DataclassesDataclass:
-        return DataModelSet(
-            data_model=dataclass.DataClass,
-            root_model=rootmodel.RootModel,
-            field_model=dataclass.DataModelField,
-            data_type_manager=dataclass.DataTypeManager,
-            dump_resolve_reference_action=None,
-        )
-    elif data_model_type == DataModelType.TypingTypedDict:
-        return DataModelSet(
-            data_model=(
-                typed_dict.TypedDict
-                if target_python_version.has_typed_dict
-                else typed_dict.TypedDictBackport
-            ),
-            root_model=rootmodel.RootModel,
-            field_model=(
-                typed_dict.DataModelField
-                if target_python_version.has_typed_dict_non_required
-                else typed_dict.DataModelFieldBackport
-            ),
-            data_type_manager=DataTypeManager,
-            dump_resolve_reference_action=None,
-        )
-    elif data_model_type == DataModelType.MsgspecStruct:
-        return DataModelSet(
-            data_model=msgspec.Struct,
-            root_model=msgspec.RootModel,
-            field_model=msgspec.DataModelField,
-            data_type_manager=msgspec.DataTypeManager,
-            dump_resolve_reference_action=None,
-            known_third_party=['msgspec'],
-        )
-    raise ValueError(
-        f'{data_model_type} is an unsupported data model type'
-    )  # pragma: no cover
-
-
-__all__ = ['ConstraintsBase', 'DataModel', 'DataModelFieldBase']
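
`get_data_model_types` above is the single dispatch point between output flavours; the parser only ever works against the returned `DataModelSet`. For example, selecting the pydantic v2 family (printed reprs abbreviated):

    from datamodel_code_generator import DataModelType
    from datamodel_code_generator.model import get_data_model_types

    model_types = get_data_model_types(DataModelType.PydanticV2BaseModel)
    print(model_types.data_model)   # the pydantic_v2 BaseModel class
    print(model_types.field_model)  # the pydantic_v2 DataModelField class
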
diff -pruN 0.26.4-3/datamodel_code_generator/model/base.py 0.45.0-1/datamodel_code_generator/model/base.py
--- 0.26.4-3/datamodel_code_generator/model/base.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.45.0-1/datamodel_code_generator/model/base.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,468 +0,0 @@
-from abc import ABC, abstractmethod
-from collections import defaultdict
-from copy import deepcopy
-from functools import lru_cache
-from pathlib import Path
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    ClassVar,
-    DefaultDict,
-    Dict,
-    FrozenSet,
-    Iterator,
-    List,
-    Optional,
-    Set,
-    Tuple,
-    TypeVar,
-    Union,
-)
-from warnings import warn
-
-from jinja2 import Environment, FileSystemLoader, Template
-from pydantic import Field
-
-from datamodel_code_generator.imports import (
-    IMPORT_ANNOTATED,
-    IMPORT_ANNOTATED_BACKPORT,
-    IMPORT_OPTIONAL,
-    IMPORT_UNION,
-    Import,
-)
-from datamodel_code_generator.reference import Reference, _BaseModel
-from datamodel_code_generator.types import (
-    ANY,
-    NONE,
-    UNION_PREFIX,
-    DataType,
-    Nullable,
-    chain_as_tuple,
-    get_optional_type,
-)
-from datamodel_code_generator.util import PYDANTIC_V2, ConfigDict, cached_property
-
-TEMPLATE_DIR: Path = Path(__file__).parents[0] / 'template'
-
-ALL_MODEL: str = '#all#'
-
-ConstraintsBaseT = TypeVar('ConstraintsBaseT', bound='ConstraintsBase')
-
-
-class ConstraintsBase(_BaseModel):
-    unique_items: Optional[bool] = Field(None, alias='uniqueItems')
-    _exclude_fields: ClassVar[Set[str]] = {'has_constraints'}
-    if PYDANTIC_V2:
-        model_config = ConfigDict(
-            arbitrary_types_allowed=True, ignored_types=(cached_property,)
-        )
-    else:
-
-        class Config:
-            arbitrary_types_allowed = True
-            keep_untouched = (cached_property,)
-
-    @cached_property
-    def has_constraints(self) -> bool:
-        return any(v is not None for v in self.dict().values())
-
-    @staticmethod
-    def merge_constraints(
-        a: ConstraintsBaseT, b: ConstraintsBaseT
-    ) -> Optional[ConstraintsBaseT]:
-        constraints_class = None
-        if isinstance(a, ConstraintsBase):  # pragma: no cover
-            root_type_field_constraints = {
-                k: v for k, v in a.dict(by_alias=True).items() if v is not None
-            }
-            constraints_class = a.__class__
-        else:
-            root_type_field_constraints = {}  # pragma: no cover
-
-        if isinstance(b, ConstraintsBase):  # pragma: no cover
-            model_field_constraints = {
-                k: v for k, v in b.dict(by_alias=True).items() if v is not None
-            }
-            constraints_class = constraints_class or b.__class__
-        else:
-            model_field_constraints = {}
-
-        if not issubclass(constraints_class, ConstraintsBase):  # pragma: no cover
-            return None
-
-        return constraints_class.parse_obj(
-            {
-                **root_type_field_constraints,
-                **model_field_constraints,
-            }
-        )
-
-
-class DataModelFieldBase(_BaseModel):
-    name: Optional[str] = None
-    default: Optional[Any] = None
-    required: bool = False
-    alias: Optional[str] = None
-    data_type: DataType
-    constraints: Any = None
-    strip_default_none: bool = False
-    nullable: Optional[bool] = None
-    parent: Optional[Any] = None
-    extras: Dict[str, Any] = {}
-    use_annotated: bool = False
-    has_default: bool = False
-    use_field_description: bool = False
-    const: bool = False
-    original_name: Optional[str] = None
-    use_default_kwarg: bool = False
-    use_one_literal_as_default: bool = False
-    _exclude_fields: ClassVar[Set[str]] = {'parent'}
-    _pass_fields: ClassVar[Set[str]] = {'parent', 'data_type'}
-    can_have_extra_keys: ClassVar[bool] = True
-    type_has_null: Optional[bool] = None
-
-    if not TYPE_CHECKING:
-
-        def __init__(self, **data: Any) -> None:
-            super().__init__(**data)
-            if self.data_type.reference or self.data_type.data_types:
-                self.data_type.parent = self
-            self.process_const()
-
-    def process_const(self) -> None:
-        if 'const' not in self.extras:
-            return None
-        self.default = self.extras['const']
-        self.const = True
-        self.required = False
-        self.nullable = False
-
-    @property
-    def type_hint(self) -> str:
-        type_hint = self.data_type.type_hint
-
-        if not type_hint:
-            return NONE
-        elif self.has_default_factory:
-            return type_hint
-        elif self.data_type.is_optional and self.data_type.type != ANY:
-            return type_hint
-        elif self.nullable is not None:
-            if self.nullable:
-                return get_optional_type(type_hint, self.data_type.use_union_operator)
-            return type_hint
-        elif self.required:
-            if self.type_has_null:
-                return get_optional_type(type_hint, self.data_type.use_union_operator)
-            return type_hint
-        elif self.fall_back_to_nullable:
-            return get_optional_type(type_hint, self.data_type.use_union_operator)
-        else:
-            return type_hint
-
-    @property
-    def imports(self) -> Tuple[Import, ...]:
-        type_hint = self.type_hint
-        has_union = not self.data_type.use_union_operator and UNION_PREFIX in type_hint
-        imports: List[Union[Tuple[Import], Iterator[Import]]] = [
-            (
-                i
-                for i in self.data_type.all_imports
-                if not (not has_union and i == IMPORT_UNION)
-            )
-        ]
-
-        if self.fall_back_to_nullable:
-            if (
-                self.nullable or (self.nullable is None and not self.required)
-            ) and not self.data_type.use_union_operator:
-                imports.append((IMPORT_OPTIONAL,))
-        else:
-            if (
-                self.nullable and not self.data_type.use_union_operator
-            ):  # pragma: no cover
-                imports.append((IMPORT_OPTIONAL,))
-        if self.use_annotated and self.annotated:
-            import_annotated = (
-                IMPORT_ANNOTATED
-                if self.data_type.python_version.has_annotated_type
-                else IMPORT_ANNOTATED_BACKPORT
-            )
-            imports.append((import_annotated,))
-        return chain_as_tuple(*imports)
-
-    @property
-    def docstring(self) -> Optional[str]:
-        if self.use_field_description:
-            description = self.extras.get('description', None)
-            if description is not None:
-                return f'{description}'
-        return None
-
-    @property
-    def unresolved_types(self) -> FrozenSet[str]:
-        return self.data_type.unresolved_types
-
-    @property
-    def field(self) -> Optional[str]:
-        """for backwards compatibility"""
-        return None
-
-    @property
-    def method(self) -> Optional[str]:
-        return None
-
-    @property
-    def represented_default(self) -> str:
-        return repr(self.default)
-
-    @property
-    def annotated(self) -> Optional[str]:
-        return None
-
-    @property
-    def has_default_factory(self) -> bool:
-        return 'default_factory' in self.extras
-
-    @property
-    def fall_back_to_nullable(self) -> bool:
-        return True
-
-
-@lru_cache()
-def get_template(template_file_path: Path) -> Template:
-    loader = FileSystemLoader(str(TEMPLATE_DIR / template_file_path.parent))
-    environment: Environment = Environment(loader=loader)
-    return environment.get_template(template_file_path.name)
-
-
-def get_module_path(name: str, file_path: Optional[Path]) -> List[str]:
-    if file_path:
-        return [
-            *file_path.parts[:-1],
-            file_path.stem,
-            *name.split('.')[:-1],
-        ]
-    return name.split('.')[:-1]
-
-
-def get_module_name(name: str, file_path: Optional[Path]) -> str:
-    return '.'.join(get_module_path(name, file_path))
-
-
-class TemplateBase(ABC):
-    @property
-    @abstractmethod
-    def template_file_path(self) -> Path:
-        raise NotImplementedError
-
-    @cached_property
-    def template(self) -> Template:
-        return get_template(self.template_file_path)
-
-    @abstractmethod
-    def render(self) -> str:
-        raise NotImplementedError
-
-    def _render(self, *args: Any, **kwargs: Any) -> str:
-        return self.template.render(*args, **kwargs)
-
-    def __str__(self) -> str:
-        return self.render()
-
-
-class BaseClassDataType(DataType): ...
-
-
-UNDEFINED: Any = object()
-
-
-class DataModel(TemplateBase, Nullable, ABC):
-    TEMPLATE_FILE_PATH: ClassVar[str] = ''
-    BASE_CLASS: ClassVar[str] = ''
-    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = ()
-
-    def __init__(
-        self,
-        *,
-        reference: Reference,
-        fields: List[DataModelFieldBase],
-        decorators: Optional[List[str]] = None,
-        base_classes: Optional[List[Reference]] = None,
-        custom_base_class: Optional[str] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-        methods: Optional[List[str]] = None,
-        path: Optional[Path] = None,
-        description: Optional[str] = None,
-        default: Any = UNDEFINED,
-        nullable: bool = False,
-        keyword_only: bool = False,
-    ) -> None:
-        self.keyword_only = keyword_only
-        if not self.TEMPLATE_FILE_PATH:
-            raise Exception('TEMPLATE_FILE_PATH is undefined')
-
-        self._custom_template_dir: Optional[Path] = custom_template_dir
-        self.decorators: List[str] = decorators or []
-        self._additional_imports: List[Import] = []
-        self.custom_base_class = custom_base_class
-        if base_classes:
-            self.base_classes: List[BaseClassDataType] = [
-                BaseClassDataType(reference=b) for b in base_classes
-            ]
-        else:
-            self.set_base_class()
-
-        self.file_path: Optional[Path] = path
-        self.reference: Reference = reference
-
-        self.reference.source = self
-
-        self.extra_template_data = (
-            # The supplied defaultdict will either create a new entry,
-            # or already contain a predefined entry for this type
-            extra_template_data[self.name]
-            if extra_template_data is not None
-            else defaultdict(dict)
-        )
-
-        self.fields = self._validate_fields(fields) if fields else []
-
-        for base_class in self.base_classes:
-            if base_class.reference:
-                base_class.reference.children.append(self)
-
-        if extra_template_data is not None:
-            all_model_extra_template_data = extra_template_data.get(ALL_MODEL)
-            if all_model_extra_template_data:
-                # The deepcopy is needed here to ensure that different models don't
-                # end up inadvertently sharing state (such as "base_class_kwargs")
-                self.extra_template_data.update(deepcopy(all_model_extra_template_data))
-
-        self.methods: List[str] = methods or []
-
-        self.description = description
-        for field in self.fields:
-            field.parent = self
-
-        self._additional_imports.extend(self.DEFAULT_IMPORTS)
-        self.default: Any = default
-        self._nullable: bool = nullable
-
-    def _validate_fields(
-        self, fields: List[DataModelFieldBase]
-    ) -> List[DataModelFieldBase]:
-        names: Set[str] = set()
-        unique_fields: List[DataModelFieldBase] = []
-        for field in fields:
-            if field.name:
-                if field.name in names:
-                    warn(f'Field name `{field.name}` is duplicated on {self.name}')
-                    continue
-                else:
-                    names.add(field.name)
-            unique_fields.append(field)
-        return unique_fields
-
-    def set_base_class(self) -> None:
-        base_class = self.custom_base_class or self.BASE_CLASS
-        if not base_class:
-            self.base_classes = []
-            return None
-        base_class_import = Import.from_full_path(base_class)
-        self._additional_imports.append(base_class_import)
-        self.base_classes = [BaseClassDataType.from_import(base_class_import)]
-
-    @cached_property
-    def template_file_path(self) -> Path:
-        template_file_path = Path(self.TEMPLATE_FILE_PATH)
-        if self._custom_template_dir is not None:
-            custom_template_file_path = self._custom_template_dir / template_file_path
-            if custom_template_file_path.exists():
-                return custom_template_file_path
-        return template_file_path
-
-    @property
-    def imports(self) -> Tuple[Import, ...]:
-        return chain_as_tuple(
-            (i for f in self.fields for i in f.imports),
-            self._additional_imports,
-        )
-
-    @property
-    def reference_classes(self) -> FrozenSet[str]:
-        return frozenset(
-            {r.reference.path for r in self.base_classes if r.reference}
-            | {t for f in self.fields for t in f.unresolved_types}
-        )
-
-    @property
-    def name(self) -> str:
-        return self.reference.name
-
-    @property
-    def duplicate_name(self) -> str:
-        return self.reference.duplicate_name or ''
-
-    @property
-    def base_class(self) -> str:
-        return ', '.join(b.type_hint for b in self.base_classes)
-
-    @staticmethod
-    def _get_class_name(name: str) -> str:
-        if '.' in name:
-            return name.rsplit('.', 1)[-1]
-        return name
-
-    @property
-    def class_name(self) -> str:
-        return self._get_class_name(self.name)
-
-    @class_name.setter
-    def class_name(self, class_name: str) -> None:
-        if '.' in self.reference.name:
-            self.reference.name = (
-                f"{self.reference.name.rsplit('.', 1)[0]}.{class_name}"
-            )
-        else:
-            self.reference.name = class_name
-
-    @property
-    def duplicate_class_name(self) -> str:
-        return self._get_class_name(self.duplicate_name)
-
-    @property
-    def module_path(self) -> List[str]:
-        return get_module_path(self.name, self.file_path)
-
-    @property
-    def module_name(self) -> str:
-        return get_module_name(self.name, self.file_path)
-
-    @property
-    def all_data_types(self) -> Iterator[DataType]:
-        for field in self.fields:
-            yield from field.data_type.all_data_types
-        yield from self.base_classes
-
-    @property
-    def nullable(self) -> bool:
-        return self._nullable
-
-    @cached_property
-    def path(self) -> str:
-        return self.reference.path
-
-    def render(self, *, class_name: Optional[str] = None) -> str:
-        response = self._render(
-            class_name=class_name or self.class_name,
-            fields=self.fields,
-            decorators=self.decorators,
-            base_class=self.base_class,
-            methods=self.methods,
-            description=self.description,
-            keyword_only=self.keyword_only,
-            **self.extra_template_data,
-        )
-        return response
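
`get_module_path` above decides where a model lives when output is split across files: the parent parts of the output path, the file stem, then the dotted prefix of the model name. For example:

    from pathlib import Path
    from datamodel_code_generator.model.base import get_module_name, get_module_path

    print(get_module_path('api.models.Pet', Path('schemas/openapi.py')))
    # ['schemas', 'openapi', 'api', 'models']
    print(get_module_name('api.models.Pet', None))
    # 'api.models'
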
diff -pruN 0.26.4-3/datamodel_code_generator/model/dataclass.py 0.45.0-1/datamodel_code_generator/model/dataclass.py
--- 0.26.4-3/datamodel_code_generator/model/dataclass.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.45.0-1/datamodel_code_generator/model/dataclass.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,181 +0,0 @@
-from pathlib import Path
-from typing import (
-    Any,
-    ClassVar,
-    DefaultDict,
-    Dict,
-    List,
-    Optional,
-    Sequence,
-    Set,
-    Tuple,
-)
-
-from datamodel_code_generator import DatetimeClassType, PythonVersion
-from datamodel_code_generator.imports import (
-    IMPORT_DATE,
-    IMPORT_DATETIME,
-    IMPORT_TIME,
-    IMPORT_TIMEDELTA,
-    Import,
-)
-from datamodel_code_generator.model import DataModel, DataModelFieldBase
-from datamodel_code_generator.model.base import UNDEFINED
-from datamodel_code_generator.model.imports import IMPORT_DATACLASS, IMPORT_FIELD
-from datamodel_code_generator.model.pydantic.base_model import Constraints
-from datamodel_code_generator.model.types import DataTypeManager as _DataTypeManager
-from datamodel_code_generator.model.types import type_map_factory
-from datamodel_code_generator.reference import Reference
-from datamodel_code_generator.types import DataType, StrictTypes, Types, chain_as_tuple
-
-
-def _has_field_assignment(field: DataModelFieldBase) -> bool:
-    return bool(field.field) or not (
-        field.required
-        or (field.represented_default == 'None' and field.strip_default_none)
-    )
-
-
-class DataClass(DataModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'dataclass.jinja2'
-    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_DATACLASS,)
-
-    def __init__(
-        self,
-        *,
-        reference: Reference,
-        fields: List[DataModelFieldBase],
-        decorators: Optional[List[str]] = None,
-        base_classes: Optional[List[Reference]] = None,
-        custom_base_class: Optional[str] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-        methods: Optional[List[str]] = None,
-        path: Optional[Path] = None,
-        description: Optional[str] = None,
-        default: Any = UNDEFINED,
-        nullable: bool = False,
-        keyword_only: bool = False,
-    ) -> None:
-        super().__init__(
-            reference=reference,
-            fields=sorted(fields, key=_has_field_assignment, reverse=False),
-            decorators=decorators,
-            base_classes=base_classes,
-            custom_base_class=custom_base_class,
-            custom_template_dir=custom_template_dir,
-            extra_template_data=extra_template_data,
-            methods=methods,
-            path=path,
-            description=description,
-            default=default,
-            nullable=nullable,
-            keyword_only=keyword_only,
-        )
-
-
-class DataModelField(DataModelFieldBase):
-    _FIELD_KEYS: ClassVar[Set[str]] = {
-        'default_factory',
-        'init',
-        'repr',
-        'hash',
-        'compare',
-        'metadata',
-        'kw_only',
-    }
-    constraints: Optional[Constraints] = None
-
-    @property
-    def imports(self) -> Tuple[Import, ...]:
-        field = self.field
-        if field and field.startswith('field('):
-            return chain_as_tuple(super().imports, (IMPORT_FIELD,))
-        return super().imports
-
-    def self_reference(self) -> bool:  # pragma: no cover
-        return isinstance(self.parent, DataClass) and self.parent.reference.path in {
-            d.reference.path for d in self.data_type.all_data_types if d.reference
-        }
-
-    @property
-    def field(self) -> Optional[str]:
-        """for backwards compatibility"""
-        result = str(self)
-        if result == '':
-            return None
-
-        return result
-
-    def __str__(self) -> str:
-        data: Dict[str, Any] = {
-            k: v for k, v in self.extras.items() if k in self._FIELD_KEYS
-        }
-
-        if self.default != UNDEFINED and self.default is not None:
-            data['default'] = self.default
-
-        if self.required:
-            data = {
-                k: v
-                for k, v in data.items()
-                if k
-                not in (
-                    'default',
-                    'default_factory',
-                )
-            }
-
-        if not data:
-            return ''
-
-        if len(data) == 1 and 'default' in data:
-            default = data['default']
-
-            if isinstance(default, (list, dict)):
-                return f'field(default_factory=lambda: {repr(default)})'
-            return repr(default)
-        kwargs = [
-            f'{k}={v if k == "default_factory" else repr(v)}' for k, v in data.items()
-        ]
-        return f'field({", ".join(kwargs)})'
-
-
-class DataTypeManager(_DataTypeManager):
-    def __init__(
-        self,
-        python_version: PythonVersion = PythonVersion.PY_38,
-        use_standard_collections: bool = False,
-        use_generic_container_types: bool = False,
-        strict_types: Optional[Sequence[StrictTypes]] = None,
-        use_non_positive_negative_number_constrained_types: bool = False,
-        use_union_operator: bool = False,
-        use_pendulum: bool = False,
-        target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
-    ):
-        super().__init__(
-            python_version,
-            use_standard_collections,
-            use_generic_container_types,
-            strict_types,
-            use_non_positive_negative_number_constrained_types,
-            use_union_operator,
-            use_pendulum,
-            target_datetime_class,
-        )
-
-        datetime_map = (
-            {
-                Types.time: self.data_type.from_import(IMPORT_TIME),
-                Types.date: self.data_type.from_import(IMPORT_DATE),
-                Types.date_time: self.data_type.from_import(IMPORT_DATETIME),
-                Types.timedelta: self.data_type.from_import(IMPORT_TIMEDELTA),
-            }
-            if target_datetime_class is DatetimeClassType.Datetime
-            else {}
-        )
-
-        self.type_map: Dict[Types, DataType] = {
-            **type_map_factory(self.data_type),
-            **datetime_map,
-        }
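
The `sorted(fields, key=_has_field_assignment)` call in `DataClass.__init__` above exists because Python dataclasses require fields without defaults to precede fields with defaults; sorting on the boolean key moves required fields first, and Python's stable sort preserves each group's relative order. The generated ordering then looks roughly like:

    from dataclasses import dataclass, field
    from typing import List, Optional

    @dataclass
    class Pet:
        name: str                  # required, no assignment -- must come first
        age: Optional[int] = None  # assigned fields follow
        tags: List[str] = field(default_factory=lambda: [])
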
diff -pruN 0.26.4-3/datamodel_code_generator/model/enum.py 0.45.0-1/datamodel_code_generator/model/enum.py
--- 0.26.4-3/datamodel_code_generator/model/enum.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.45.0-1/datamodel_code_generator/model/enum.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,102 +0,0 @@
-from __future__ import annotations
-
-from pathlib import Path
-from typing import Any, ClassVar, DefaultDict, Dict, List, Optional, Tuple
-
-from datamodel_code_generator.imports import IMPORT_ANY, IMPORT_ENUM, Import
-from datamodel_code_generator.model import DataModel, DataModelFieldBase
-from datamodel_code_generator.model.base import UNDEFINED, BaseClassDataType
-from datamodel_code_generator.reference import Reference
-from datamodel_code_generator.types import DataType, Types
-
-_INT: str = 'int'
-_FLOAT: str = 'float'
-_BYTES: str = 'bytes'
-_STR: str = 'str'
-
-SUBCLASS_BASE_CLASSES: Dict[Types, str] = {
-    Types.int32: _INT,
-    Types.int64: _INT,
-    Types.integer: _INT,
-    Types.float: _FLOAT,
-    Types.double: _FLOAT,
-    Types.number: _FLOAT,
-    Types.byte: _BYTES,
-    Types.string: _STR,
-}
-
-
-class Enum(DataModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'Enum.jinja2'
-    BASE_CLASS: ClassVar[str] = 'enum.Enum'
-    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_ENUM,)
-
-    def __init__(
-        self,
-        *,
-        reference: Reference,
-        fields: List[DataModelFieldBase],
-        decorators: Optional[List[str]] = None,
-        base_classes: Optional[List[Reference]] = None,
-        custom_base_class: Optional[str] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-        methods: Optional[List[str]] = None,
-        path: Optional[Path] = None,
-        description: Optional[str] = None,
-        type_: Optional[Types] = None,
-        default: Any = UNDEFINED,
-        nullable: bool = False,
-        keyword_only: bool = False,
-    ):
-        super().__init__(
-            reference=reference,
-            fields=fields,
-            decorators=decorators,
-            base_classes=base_classes,
-            custom_base_class=custom_base_class,
-            custom_template_dir=custom_template_dir,
-            extra_template_data=extra_template_data,
-            methods=methods,
-            path=path,
-            description=description,
-            default=default,
-            nullable=nullable,
-            keyword_only=keyword_only,
-        )
-
-        if not base_classes and type_:
-            base_class = SUBCLASS_BASE_CLASSES.get(type_)
-            if base_class:
-                self.base_classes: List[BaseClassDataType] = [
-                    BaseClassDataType(type=base_class),
-                    *self.base_classes,
-                ]
-
-    @classmethod
-    def get_data_type(cls, types: Types, **kwargs: Any) -> DataType:
-        raise NotImplementedError
-
-    def get_member(self, field: DataModelFieldBase) -> Member:
-        return Member(self, field)
-
-    def find_member(self, value: Any) -> Optional[Member]:
-        repr_value = repr(value)
-        for field in self.fields:  # pragma: no cover
-            if field.default == repr_value:
-                return self.get_member(field)
-        return None  # pragma: no cover
-
-    @property
-    def imports(self) -> Tuple[Import, ...]:
-        return tuple(i for i in super().imports if i != IMPORT_ANY)
-
-
-class Member:
-    def __init__(self, enum: Enum, field: DataModelFieldBase) -> None:
-        self.enum: Enum = enum
-        self.field: DataModelFieldBase = field
-        self.alias: Optional[str] = None
-
-    def __repr__(self) -> str:
-        return f'{self.alias or self.enum.name}.{self.field.name}'
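The SUBCLASS_BASE_CLASSES table above made typed enums subclass the matching builtin, so members compare equal to raw schema values. A minimal sketch of the generated shape, assuming a schema with "type": "integer" (names hypothetical):

import enum

class Priority(int, enum.Enum):
    low = 1
    high = 2

assert Priority.low == 1  # the int base class makes members interchangeable with ints
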
diff -pruN 0.26.4-3/datamodel_code_generator/model/imports.py 0.45.0-1/datamodel_code_generator/model/imports.py
--- 0.26.4-3/datamodel_code_generator/model/imports.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.45.0-1/datamodel_code_generator/model/imports.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,13 +0,0 @@
-from datamodel_code_generator.imports import Import
-
-IMPORT_DATACLASS = Import.from_full_path('dataclasses.dataclass')
-IMPORT_FIELD = Import.from_full_path('dataclasses.field')
-IMPORT_CLASSVAR = Import.from_full_path('typing.ClassVar')
-IMPORT_TYPED_DICT = Import.from_full_path('typing.TypedDict')
-IMPORT_TYPED_DICT_BACKPORT = Import.from_full_path('typing_extensions.TypedDict')
-IMPORT_NOT_REQUIRED = Import.from_full_path('typing.NotRequired')
-IMPORT_NOT_REQUIRED_BACKPORT = Import.from_full_path('typing_extensions.NotRequired')
-IMPORT_MSGSPEC_STRUCT = Import.from_full_path('msgspec.Struct')
-IMPORT_MSGSPEC_FIELD = Import.from_full_path('msgspec.field')
-IMPORT_MSGSPEC_META = Import.from_full_path('msgspec.Meta')
-IMPORT_MSGSPEC_CONVERT = Import.from_full_path('msgspec.convert')
diff -pruN 0.26.4-3/datamodel_code_generator/model/msgspec.py 0.45.0-1/datamodel_code_generator/model/msgspec.py
--- 0.26.4-3/datamodel_code_generator/model/msgspec.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.45.0-1/datamodel_code_generator/model/msgspec.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,338 +0,0 @@
-from functools import wraps
-from pathlib import Path
-from typing import (
-    Any,
-    ClassVar,
-    DefaultDict,
-    Dict,
-    List,
-    Optional,
-    Sequence,
-    Set,
-    Tuple,
-    Type,
-    TypeVar,
-)
-
-from pydantic import Field
-
-from datamodel_code_generator import DatetimeClassType, PythonVersion
-from datamodel_code_generator.imports import (
-    IMPORT_DATE,
-    IMPORT_DATETIME,
-    IMPORT_TIME,
-    IMPORT_TIMEDELTA,
-    Import,
-)
-from datamodel_code_generator.model import DataModel, DataModelFieldBase
-from datamodel_code_generator.model.base import UNDEFINED
-from datamodel_code_generator.model.imports import (
-    IMPORT_CLASSVAR,
-    IMPORT_MSGSPEC_CONVERT,
-    IMPORT_MSGSPEC_FIELD,
-    IMPORT_MSGSPEC_META,
-)
-from datamodel_code_generator.model.pydantic.base_model import (
-    Constraints as _Constraints,
-)
-from datamodel_code_generator.model.rootmodel import RootModel as _RootModel
-from datamodel_code_generator.model.types import DataTypeManager as _DataTypeManager
-from datamodel_code_generator.model.types import type_map_factory
-from datamodel_code_generator.reference import Reference
-from datamodel_code_generator.types import (
-    DataType,
-    StrictTypes,
-    Types,
-    chain_as_tuple,
-    get_optional_type,
-)
-
-
-def _has_field_assignment(field: DataModelFieldBase) -> bool:
-    return not (
-        field.required
-        or (field.represented_default == 'None' and field.strip_default_none)
-    )
-
-
-DataModelFieldBaseT = TypeVar('DataModelFieldBaseT', bound=DataModelFieldBase)
-
-
-def import_extender(cls: Type[DataModelFieldBaseT]) -> Type[DataModelFieldBaseT]:
-    original_imports: property = getattr(cls, 'imports', None)  # type: ignore
-
-    @wraps(original_imports.fget)  # type: ignore
-    def new_imports(self: DataModelFieldBaseT) -> Tuple[Import, ...]:
-        extra_imports = []
-        field = self.field
-        # TODO: Improve field detection
-        if field and field.startswith('field('):
-            extra_imports.append(IMPORT_MSGSPEC_FIELD)
-        if self.field and 'lambda: convert' in self.field:
-            extra_imports.append(IMPORT_MSGSPEC_CONVERT)
-        if self.annotated:
-            extra_imports.append(IMPORT_MSGSPEC_META)
-        if self.extras.get('is_classvar'):
-            extra_imports.append(IMPORT_CLASSVAR)
-        return chain_as_tuple(original_imports.fget(self), extra_imports)  # type: ignore
-
-    setattr(cls, 'imports', property(new_imports))
-    return cls
-
-
-class RootModel(_RootModel):
-    pass
-
-
-class Struct(DataModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'msgspec.jinja2'
-    BASE_CLASS: ClassVar[str] = 'msgspec.Struct'
-    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = ()
-
-    def __init__(
-        self,
-        *,
-        reference: Reference,
-        fields: List[DataModelFieldBase],
-        decorators: Optional[List[str]] = None,
-        base_classes: Optional[List[Reference]] = None,
-        custom_base_class: Optional[str] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-        methods: Optional[List[str]] = None,
-        path: Optional[Path] = None,
-        description: Optional[str] = None,
-        default: Any = UNDEFINED,
-        nullable: bool = False,
-        keyword_only: bool = False,
-    ) -> None:
-        super().__init__(
-            reference=reference,
-            fields=sorted(fields, key=_has_field_assignment, reverse=False),
-            decorators=decorators,
-            base_classes=base_classes,
-            custom_base_class=custom_base_class,
-            custom_template_dir=custom_template_dir,
-            extra_template_data=extra_template_data,
-            methods=methods,
-            path=path,
-            description=description,
-            default=default,
-            nullable=nullable,
-            keyword_only=keyword_only,
-        )
-        self.extra_template_data.setdefault('base_class_kwargs', {})
-        if self.keyword_only:
-            self.add_base_class_kwarg('kw_only', 'True')
-
-    def add_base_class_kwarg(self, name: str, value):
-        self.extra_template_data['base_class_kwargs'][name] = value
-
-
-class Constraints(_Constraints):
-    # To override existing pattern alias
-    regex: Optional[str] = Field(None, alias='regex')
-    pattern: Optional[str] = Field(None, alias='pattern')
-
-
-@import_extender
-class DataModelField(DataModelFieldBase):
-    _FIELD_KEYS: ClassVar[Set[str]] = {
-        'default',
-        'default_factory',
-    }
-    _META_FIELD_KEYS: ClassVar[Set[str]] = {
-        'title',
-        'description',
-        'gt',
-        'ge',
-        'lt',
-        'le',
-        'multiple_of',
-        # 'min_items', # not supported by msgspec
-        # 'max_items', # not supported by msgspec
-        'min_length',
-        'max_length',
-        'pattern',
-        'examples',
-        # 'unique_items', # not supported by msgspec
-    }
-    _PARSE_METHOD = 'convert'
-    _COMPARE_EXPRESSIONS: ClassVar[Set[str]] = {'gt', 'ge', 'lt', 'le', 'multiple_of'}
-    constraints: Optional[Constraints] = None
-
-    def self_reference(self) -> bool:  # pragma: no cover
-        return isinstance(self.parent, Struct) and self.parent.reference.path in {
-            d.reference.path for d in self.data_type.all_data_types if d.reference
-        }
-
-    def process_const(self) -> None:
-        if 'const' not in self.extras:
-            return None
-        self.const = True
-        self.nullable = False
-        const = self.extras['const']
-        if self.data_type.type == 'str' and isinstance(
-            const, str
-        ):  # pragma: no cover # Literal supports only str
-            self.data_type = self.data_type.__class__(literals=[const])
-
-    def _get_strict_field_constraint_value(self, constraint: str, value: Any) -> Any:
-        if value is None or constraint not in self._COMPARE_EXPRESSIONS:
-            return value
-
-        if any(
-            data_type.type == 'float' for data_type in self.data_type.all_data_types
-        ):
-            return float(value)
-        return int(value)
-
-    @property
-    def field(self) -> Optional[str]:
-        """for backwards compatibility"""
-        result = str(self)
-        if result == '':
-            return None
-
-        return result
-
-    def __str__(self) -> str:
-        data: Dict[str, Any] = {
-            k: v for k, v in self.extras.items() if k in self._FIELD_KEYS
-        }
-        if self.alias:
-            data['name'] = self.alias
-
-        if self.default != UNDEFINED and self.default is not None:
-            data['default'] = self.default
-        elif not self.required:
-            data['default'] = None
-
-        if self.required:
-            data = {
-                k: v
-                for k, v in data.items()
-                if k
-                not in (
-                    'default',
-                    'default_factory',
-                )
-            }
-        elif self.default and 'default_factory' not in data:
-            default_factory = self._get_default_as_struct_model()
-            if default_factory is not None:
-                data.pop('default')
-                data['default_factory'] = default_factory
-
-        if not data:
-            return ''
-
-        if len(data) == 1 and 'default' in data:
-            return repr(data['default'])
-
-        kwargs = [
-            f'{k}={v if k == "default_factory" else repr(v)}' for k, v in data.items()
-        ]
-        return f'field({", ".join(kwargs)})'
-
-    @property
-    def annotated(self) -> Optional[str]:
-        if not self.use_annotated:  # pragma: no cover
-            return None
-
-        data: Dict[str, Any] = {
-            k: v for k, v in self.extras.items() if k in self._META_FIELD_KEYS
-        }
-        if (
-            self.constraints is not None
-            and not self.self_reference()
-            and not self.data_type.strict
-        ):
-            data = {
-                **data,
-                **{
-                    k: self._get_strict_field_constraint_value(k, v)
-                    for k, v in self.constraints.dict().items()
-                    if k in self._META_FIELD_KEYS
-                },
-            }
-
-        meta_arguments = sorted(
-            f'{k}={repr(v)}' for k, v in data.items() if v is not None
-        )
-        if not meta_arguments:
-            return None
-
-        meta = f'Meta({", ".join(meta_arguments)})'
-
-        if not self.required and not self.extras.get('is_classvar'):
-            type_hint = self.data_type.type_hint
-            annotated_type = f'Annotated[{type_hint}, {meta}]'
-            return get_optional_type(annotated_type, self.data_type.use_union_operator)
-
-        annotated_type = f'Annotated[{self.type_hint}, {meta}]'
-        if self.extras.get('is_classvar'):
-            annotated_type = f'ClassVar[{annotated_type}]'
-
-        return annotated_type
-
-    def _get_default_as_struct_model(self) -> Optional[str]:
-        for data_type in self.data_type.data_types or (self.data_type,):
-            # TODO: Check nested data_types
-            if data_type.is_dict or self.data_type.is_union:
-                # TODO: Parse Union and dict model for default
-                continue  # pragma: no cover
-            elif data_type.is_list and len(data_type.data_types) == 1:
-                data_type = data_type.data_types[0]
-                if (  # pragma: no cover
-                    data_type.reference
-                    and (
-                        isinstance(data_type.reference.source, Struct)
-                        or isinstance(data_type.reference.source, RootModel)
-                    )
-                    and isinstance(self.default, list)
-                ):
-                    return f'lambda: {self._PARSE_METHOD}({repr(self.default)},  type=list[{data_type.alias or data_type.reference.source.class_name}])'
-            elif data_type.reference and isinstance(data_type.reference.source, Struct):
-                return f'lambda: {self._PARSE_METHOD}({repr(self.default)},  type={data_type.alias or data_type.reference.source.class_name})'
-        return None
-
-
-class DataTypeManager(_DataTypeManager):
-    def __init__(
-        self,
-        python_version: PythonVersion = PythonVersion.PY_38,
-        use_standard_collections: bool = False,
-        use_generic_container_types: bool = False,
-        strict_types: Optional[Sequence[StrictTypes]] = None,
-        use_non_positive_negative_number_constrained_types: bool = False,
-        use_union_operator: bool = False,
-        use_pendulum: bool = False,
-        target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
-    ):
-        super().__init__(
-            python_version,
-            use_standard_collections,
-            use_generic_container_types,
-            strict_types,
-            use_non_positive_negative_number_constrained_types,
-            use_union_operator,
-            use_pendulum,
-            target_datetime_class,
-        )
-
-        datetime_map = (
-            {
-                Types.time: self.data_type.from_import(IMPORT_TIME),
-                Types.date: self.data_type.from_import(IMPORT_DATE),
-                Types.date_time: self.data_type.from_import(IMPORT_DATETIME),
-                Types.timedelta: self.data_type.from_import(IMPORT_TIMEDELTA),
-            }
-            if target_datetime_class is DatetimeClassType.Datetime
-            else {}
-        )
-
-        self.type_map: Dict[Types, DataType] = {
-            **type_map_factory(self.data_type),
-            **datetime_map,
-        }
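Taken together, the msgspec classes removed above sorted required fields ahead of defaulted ones (msgspec rejects a required field after one with a default), rendered constraints as Annotated[..., Meta(...)], and expressed aliases via field(name=...). A hedged sketch of the resulting Struct (field names hypothetical):

from typing import Annotated, Optional

import msgspec
from msgspec import Meta, field

class User(msgspec.Struct):
    id: Annotated[int, Meta(ge=0)]                                   # constraint -> Meta
    full_name: Optional[str] = field(default=None, name='fullName')  # alias -> name=
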
diff -pruN 0.26.4-3/datamodel_code_generator/model/pydantic/__init__.py 0.45.0-1/datamodel_code_generator/model/pydantic/__init__.py
--- 0.26.4-3/datamodel_code_generator/model/pydantic/__init__.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.45.0-1/datamodel_code_generator/model/pydantic/__init__.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,47 +0,0 @@
-from __future__ import annotations
-
-from typing import Iterable, Optional
-
-from pydantic import BaseModel as _BaseModel
-
-from .base_model import BaseModel, DataModelField
-from .custom_root_type import CustomRootType
-from .dataclass import DataClass
-from .types import DataTypeManager
-
-
-def dump_resolve_reference_action(class_names: Iterable[str]) -> str:
-    return '\n'.join(
-        f'{class_name}.update_forward_refs()' for class_name in class_names
-    )
-
-
-class Config(_BaseModel):
-    extra: Optional[str] = None
-    title: Optional[str] = None
-    allow_population_by_field_name: Optional[bool] = None
-    allow_extra_fields: Optional[bool] = None
-    allow_mutation: Optional[bool] = None
-    arbitrary_types_allowed: Optional[bool] = None
-    orm_mode: Optional[bool] = None
-
-
-# def get_validator_template() -> Template:
-#     template_file_path: Path = Path('pydantic') / 'one_of_validator.jinja2'
-#     loader = FileSystemLoader(str(TEMPLATE_DIR / template_file_path.parent))
-#     environment: Environment = Environment(loader=loader, autoescape=True)
-#     return environment.get_template(template_file_path.name)
-#
-#
-# VALIDATOR_TEMPLATE: Template = get_validator_template()
-
-
-__all__ = [
-    'BaseModel',
-    'DataModelField',
-    'CustomRootType',
-    'DataClass',
-    'dump_resolve_reference_action',
-    'DataTypeManager',
-    # 'VALIDATOR_TEMPLATE',
-]
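For reference, the dump_resolve_reference_action helper above produced the pydantic v1 footer that resolves forward references after all classes are defined:

print(dump_resolve_reference_action(['Pet', 'User']))
# Pet.update_forward_refs()
# User.update_forward_refs()
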
diff -pruN 0.26.4-3/datamodel_code_generator/model/pydantic/base_model.py 0.45.0-1/datamodel_code_generator/model/pydantic/base_model.py
--- 0.26.4-3/datamodel_code_generator/model/pydantic/base_model.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.45.0-1/datamodel_code_generator/model/pydantic/base_model.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,325 +0,0 @@
-from abc import ABC
-from pathlib import Path
-from typing import Any, ClassVar, DefaultDict, Dict, List, Optional, Set, Tuple
-
-from pydantic import Field
-
-from datamodel_code_generator.imports import Import
-from datamodel_code_generator.model import (
-    ConstraintsBase,
-    DataModel,
-    DataModelFieldBase,
-)
-from datamodel_code_generator.model.base import UNDEFINED
-from datamodel_code_generator.model.pydantic.imports import (
-    IMPORT_ANYURL,
-    IMPORT_EXTRA,
-    IMPORT_FIELD,
-)
-from datamodel_code_generator.reference import Reference
-from datamodel_code_generator.types import UnionIntFloat, chain_as_tuple
-from datamodel_code_generator.util import cached_property
-
-
-class Constraints(ConstraintsBase):
-    gt: Optional[UnionIntFloat] = Field(None, alias='exclusiveMinimum')
-    ge: Optional[UnionIntFloat] = Field(None, alias='minimum')
-    lt: Optional[UnionIntFloat] = Field(None, alias='exclusiveMaximum')
-    le: Optional[UnionIntFloat] = Field(None, alias='maximum')
-    multiple_of: Optional[float] = Field(None, alias='multipleOf')
-    min_items: Optional[int] = Field(None, alias='minItems')
-    max_items: Optional[int] = Field(None, alias='maxItems')
-    min_length: Optional[int] = Field(None, alias='minLength')
-    max_length: Optional[int] = Field(None, alias='maxLength')
-    regex: Optional[str] = Field(None, alias='pattern')
-
-
-class DataModelField(DataModelFieldBase):
-    _EXCLUDE_FIELD_KEYS: ClassVar[Set[str]] = {
-        'alias',
-        'default',
-        'const',
-        'gt',
-        'ge',
-        'lt',
-        'le',
-        'multiple_of',
-        'min_items',
-        'max_items',
-        'min_length',
-        'max_length',
-        'regex',
-    }
-    _COMPARE_EXPRESSIONS: ClassVar[Set[str]] = {'gt', 'ge', 'lt', 'le'}
-    constraints: Optional[Constraints] = None
-    _PARSE_METHOD: ClassVar[str] = 'parse_obj'
-
-    @property
-    def method(self) -> Optional[str]:
-        return self.validator
-
-    @property
-    def validator(self) -> Optional[str]:
-        return None
-        # TODO refactor this method for other validation logic
-        # from datamodel_code_generator.model.pydantic import VALIDATOR_TEMPLATE
-        #
-        # return VALIDATOR_TEMPLATE.render(
-        #     field_name=self.name, types=','.join([t.type_hint for t in self.data_types])
-        # )
-
-    @property
-    def field(self) -> Optional[str]:
-        """for backwards compatibility"""
-        result = str(self)
-        if (
-            self.use_default_kwarg
-            and not result.startswith('Field(...')
-            and not result.startswith('Field(default_factory=')
-        ):
-            # Use `default=` for fields that have a default value so that type
-            # checkers using @dataclass_transform can infer the field as
-            # optional in __init__.
-            result = result.replace('Field(', 'Field(default=')
-        if result == '':
-            return None
-
-        return result
-
-    def self_reference(self) -> bool:
-        return isinstance(
-            self.parent, BaseModelBase
-        ) and self.parent.reference.path in {
-            d.reference.path for d in self.data_type.all_data_types if d.reference
-        }
-
-    def _get_strict_field_constraint_value(self, constraint: str, value: Any) -> Any:
-        if value is None or constraint not in self._COMPARE_EXPRESSIONS:
-            return value
-
-        if any(
-            data_type.type == 'float' for data_type in self.data_type.all_data_types
-        ):
-            return float(value)
-        return int(value)
-
-    def _get_default_as_pydantic_model(self) -> Optional[str]:
-        for data_type in self.data_type.data_types or (self.data_type,):
-            # TODO: Check nested data_types
-            if data_type.is_dict or self.data_type.is_union:
-                # TODO: Parse Union and dict model for default
-                continue
-            elif data_type.is_list and len(data_type.data_types) == 1:
-                data_type = data_type.data_types[0]
-                if (
-                    data_type.reference
-                    and isinstance(data_type.reference.source, BaseModelBase)
-                    and isinstance(self.default, list)
-                ):  # pragma: no cover
-                    return f'lambda :[{data_type.alias or data_type.reference.source.class_name}.{self._PARSE_METHOD}(v) for v in {repr(self.default)}]'
-            elif data_type.reference and isinstance(
-                data_type.reference.source, BaseModelBase
-            ):  # pragma: no cover
-                return f'lambda :{data_type.alias or data_type.reference.source.class_name}.{self._PARSE_METHOD}({repr(self.default)})'
-        return None
-
-    def _process_data_in_str(self, data: Dict[str, Any]) -> None:
-        if self.const:
-            data['const'] = True
-
-    def _process_annotated_field_arguments(
-        self, field_arguments: List[str]
-    ) -> List[str]:
-        return field_arguments
-
-    def __str__(self) -> str:
-        data: Dict[str, Any] = {
-            k: v for k, v in self.extras.items() if k not in self._EXCLUDE_FIELD_KEYS
-        }
-        if self.alias:
-            data['alias'] = self.alias
-        if (
-            self.constraints is not None
-            and not self.self_reference()
-            and not self.data_type.strict
-        ):
-            data = {
-                **data,
-                **(
-                    {}
-                    if any(
-                        d.import_ == IMPORT_ANYURL
-                        for d in self.data_type.all_data_types
-                    )
-                    else {
-                        k: self._get_strict_field_constraint_value(k, v)
-                        for k, v in self.constraints.dict(exclude_unset=True).items()
-                    }
-                ),
-            }
-
-        if self.use_field_description:
-            data.pop('description', None)  # Description is part of field docstring
-
-        self._process_data_in_str(data)
-
-        discriminator = data.pop('discriminator', None)
-        if discriminator:
-            if isinstance(discriminator, str):
-                data['discriminator'] = discriminator
-            elif isinstance(discriminator, dict):  # pragma: no cover
-                data['discriminator'] = discriminator['propertyName']
-
-        if self.required:
-            default_factory = None
-        elif self.default and 'default_factory' not in data:
-            default_factory = self._get_default_as_pydantic_model()
-        else:
-            default_factory = data.pop('default_factory', None)
-
-        field_arguments = sorted(
-            f'{k}={repr(v)}' for k, v in data.items() if v is not None
-        )
-
-        if not field_arguments and not default_factory:
-            if self.nullable and self.required:
-                return 'Field(...)'  # Field() is for mypy
-            return ''
-
-        if self.use_annotated:
-            field_arguments = self._process_annotated_field_arguments(field_arguments)
-        elif self.required:
-            field_arguments = ['...', *field_arguments]
-        elif default_factory:
-            field_arguments = [f'default_factory={default_factory}', *field_arguments]
-        else:
-            field_arguments = [f'{repr(self.default)}', *field_arguments]
-
-        return f'Field({", ".join(field_arguments)})'
-
-    @property
-    def annotated(self) -> Optional[str]:
-        if not self.use_annotated or not str(self):
-            return None
-        return f'Annotated[{self.type_hint}, {str(self)}]'
-
-    @property
-    def imports(self) -> Tuple[Import, ...]:
-        if self.field:
-            return chain_as_tuple(super().imports, (IMPORT_FIELD,))
-        return super().imports
-
-
-class BaseModelBase(DataModel, ABC):
-    def __init__(
-        self,
-        *,
-        reference: Reference,
-        fields: List[DataModelFieldBase],
-        decorators: Optional[List[str]] = None,
-        base_classes: Optional[List[Reference]] = None,
-        custom_base_class: Optional[str] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Any]] = None,
-        path: Optional[Path] = None,
-        description: Optional[str] = None,
-        default: Any = UNDEFINED,
-        nullable: bool = False,
-        keyword_only: bool = False,
-    ) -> None:
-        methods: List[str] = [field.method for field in fields if field.method]
-
-        super().__init__(
-            fields=fields,
-            reference=reference,
-            decorators=decorators,
-            base_classes=base_classes,
-            custom_base_class=custom_base_class,
-            custom_template_dir=custom_template_dir,
-            extra_template_data=extra_template_data,
-            methods=methods,
-            path=path,
-            description=description,
-            default=default,
-            nullable=nullable,
-            keyword_only=keyword_only,
-        )
-
-    @cached_property
-    def template_file_path(self) -> Path:
-        # This property is for Backward compatibility
-        # Current version supports '{custom_template_dir}/BaseModel.jinja'
-        # But, Future version will support only '{custom_template_dir}/pydantic/BaseModel.jinja'
-        if self._custom_template_dir is not None:
-            custom_template_file_path = (
-                self._custom_template_dir / Path(self.TEMPLATE_FILE_PATH).name
-            )
-            if custom_template_file_path.exists():
-                return custom_template_file_path
-        return super().template_file_path
-
-
-class BaseModel(BaseModelBase):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'pydantic/BaseModel.jinja2'
-    BASE_CLASS: ClassVar[str] = 'pydantic.BaseModel'
-
-    def __init__(
-        self,
-        *,
-        reference: Reference,
-        fields: List[DataModelFieldBase],
-        decorators: Optional[List[str]] = None,
-        base_classes: Optional[List[Reference]] = None,
-        custom_base_class: Optional[str] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Any]] = None,
-        path: Optional[Path] = None,
-        description: Optional[str] = None,
-        default: Any = UNDEFINED,
-        nullable: bool = False,
-        keyword_only: bool = False,
-    ) -> None:
-        super().__init__(
-            reference=reference,
-            fields=fields,
-            decorators=decorators,
-            base_classes=base_classes,
-            custom_base_class=custom_base_class,
-            custom_template_dir=custom_template_dir,
-            extra_template_data=extra_template_data,
-            path=path,
-            description=description,
-            default=default,
-            nullable=nullable,
-            keyword_only=keyword_only,
-        )
-        config_parameters: Dict[str, Any] = {}
-
-        additionalProperties = self.extra_template_data.get('additionalProperties')
-        allow_extra_fields = self.extra_template_data.get('allow_extra_fields')
-        if additionalProperties is not None or allow_extra_fields:
-            config_parameters['extra'] = (
-                'Extra.allow'
-                if additionalProperties or allow_extra_fields
-                else 'Extra.forbid'
-            )
-            self._additional_imports.append(IMPORT_EXTRA)
-
-        for config_attribute in 'allow_population_by_field_name', 'allow_mutation':
-            if config_attribute in self.extra_template_data:
-                config_parameters[config_attribute] = self.extra_template_data[
-                    config_attribute
-                ]
-        for data_type in self.all_data_types:
-            if data_type.is_custom_type:
-                config_parameters['arbitrary_types_allowed'] = True
-                break
-
-        if isinstance(self.extra_template_data.get('config'), dict):
-            for key, value in self.extra_template_data['config'].items():
-                config_parameters[key] = value
-
-        if config_parameters:
-            from datamodel_code_generator.model.pydantic import Config
-
-            self.extra_template_data['config'] = Config.parse_obj(config_parameters)
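The v1 BaseModel/DataModelField pair above turned JSON Schema constraints into Field(...) keyword arguments (with a leading ... for required fields) and additionalProperties into an Extra-based Config. A hedged sketch of the emitted model (names hypothetical; layout approximated):

from typing import List, Optional

from pydantic import BaseModel, Extra, Field

class User(BaseModel):
    class Config:
        extra = Extra.forbid  # additionalProperties: false

    name: str = Field(..., max_length=64)                        # required + constraint
    tags: Optional[List[str]] = Field(None, description='labels')  # default leads the call
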
diff -pruN 0.26.4-3/datamodel_code_generator/model/pydantic/custom_root_type.py 0.45.0-1/datamodel_code_generator/model/pydantic/custom_root_type.py
--- 0.26.4-3/datamodel_code_generator/model/pydantic/custom_root_type.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.45.0-1/datamodel_code_generator/model/pydantic/custom_root_type.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,10 +0,0 @@
-from __future__ import annotations
-
-from typing import ClassVar
-
-from datamodel_code_generator.model.pydantic.base_model import BaseModel
-
-
-class CustomRootType(BaseModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'pydantic/BaseModel_root.jinja2'
-    BASE_CLASS: ClassVar[str] = 'pydantic.BaseModel'
diff -pruN 0.26.4-3/datamodel_code_generator/model/pydantic/dataclass.py 0.45.0-1/datamodel_code_generator/model/pydantic/dataclass.py
--- 0.26.4-3/datamodel_code_generator/model/pydantic/dataclass.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.45.0-1/datamodel_code_generator/model/pydantic/dataclass.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,12 +0,0 @@
-from __future__ import annotations
-
-from typing import ClassVar, Tuple
-
-from datamodel_code_generator.imports import Import
-from datamodel_code_generator.model import DataModel
-from datamodel_code_generator.model.pydantic.imports import IMPORT_DATACLASS
-
-
-class DataClass(DataModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'pydantic/dataclass.jinja2'
-    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_DATACLASS,)
diff -pruN 0.26.4-3/datamodel_code_generator/model/pydantic/imports.py 0.45.0-1/datamodel_code_generator/model/pydantic/imports.py
--- 0.26.4-3/datamodel_code_generator/model/pydantic/imports.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.45.0-1/datamodel_code_generator/model/pydantic/imports.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,35 +0,0 @@
-from datamodel_code_generator.imports import Import
-
-IMPORT_CONSTR = Import.from_full_path('pydantic.constr')
-IMPORT_CONINT = Import.from_full_path('pydantic.conint')
-IMPORT_CONFLOAT = Import.from_full_path('pydantic.confloat')
-IMPORT_CONDECIMAL = Import.from_full_path('pydantic.condecimal')
-IMPORT_CONBYTES = Import.from_full_path('pydantic.conbytes')
-IMPORT_POSITIVE_INT = Import.from_full_path('pydantic.PositiveInt')
-IMPORT_NEGATIVE_INT = Import.from_full_path('pydantic.NegativeInt')
-IMPORT_NON_POSITIVE_INT = Import.from_full_path('pydantic.NonPositiveInt')
-IMPORT_NON_NEGATIVE_INT = Import.from_full_path('pydantic.NonNegativeInt')
-IMPORT_POSITIVE_FLOAT = Import.from_full_path('pydantic.PositiveFloat')
-IMPORT_NEGATIVE_FLOAT = Import.from_full_path('pydantic.NegativeFloat')
-IMPORT_NON_NEGATIVE_FLOAT = Import.from_full_path('pydantic.NonNegativeFloat')
-IMPORT_NON_POSITIVE_FLOAT = Import.from_full_path('pydantic.NonPositiveFloat')
-IMPORT_SECRET_STR = Import.from_full_path('pydantic.SecretStr')
-IMPORT_EMAIL_STR = Import.from_full_path('pydantic.EmailStr')
-IMPORT_UUID1 = Import.from_full_path('pydantic.UUID1')
-IMPORT_UUID2 = Import.from_full_path('pydantic.UUID2')
-IMPORT_UUID3 = Import.from_full_path('pydantic.UUID3')
-IMPORT_UUID4 = Import.from_full_path('pydantic.UUID4')
-IMPORT_UUID5 = Import.from_full_path('pydantic.UUID5')
-IMPORT_ANYURL = Import.from_full_path('pydantic.AnyUrl')
-IMPORT_IPV4ADDRESS = Import.from_full_path('ipaddress.IPv4Address')
-IMPORT_IPV6ADDRESS = Import.from_full_path('ipaddress.IPv6Address')
-IMPORT_IPV4NETWORKS = Import.from_full_path('ipaddress.IPv4Network')
-IMPORT_IPV6NETWORKS = Import.from_full_path('ipaddress.IPv6Network')
-IMPORT_EXTRA = Import.from_full_path('pydantic.Extra')
-IMPORT_FIELD = Import.from_full_path('pydantic.Field')
-IMPORT_STRICT_INT = Import.from_full_path('pydantic.StrictInt')
-IMPORT_STRICT_FLOAT = Import.from_full_path('pydantic.StrictFloat')
-IMPORT_STRICT_STR = Import.from_full_path('pydantic.StrictStr')
-IMPORT_STRICT_BOOL = Import.from_full_path('pydantic.StrictBool')
-IMPORT_STRICT_BYTES = Import.from_full_path('pydantic.StrictBytes')
-IMPORT_DATACLASS = Import.from_full_path('pydantic.dataclasses.dataclass')
diff -pruN 0.26.4-3/datamodel_code_generator/model/pydantic/types.py 0.45.0-1/datamodel_code_generator/model/pydantic/types.py
--- 0.26.4-3/datamodel_code_generator/model/pydantic/types.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.45.0-1/datamodel_code_generator/model/pydantic/types.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,353 +0,0 @@
-from __future__ import annotations
-
-from decimal import Decimal
-from typing import Any, ClassVar, Dict, Optional, Sequence, Set, Type
-
-from datamodel_code_generator.format import DatetimeClassType, PythonVersion
-from datamodel_code_generator.imports import (
-    IMPORT_ANY,
-    IMPORT_DATE,
-    IMPORT_DATETIME,
-    IMPORT_DECIMAL,
-    IMPORT_PATH,
-    IMPORT_PENDULUM_DATE,
-    IMPORT_PENDULUM_DATETIME,
-    IMPORT_PENDULUM_DURATION,
-    IMPORT_PENDULUM_TIME,
-    IMPORT_TIME,
-    IMPORT_TIMEDELTA,
-    IMPORT_UUID,
-)
-from datamodel_code_generator.model.pydantic.imports import (
-    IMPORT_ANYURL,
-    IMPORT_CONBYTES,
-    IMPORT_CONDECIMAL,
-    IMPORT_CONFLOAT,
-    IMPORT_CONINT,
-    IMPORT_CONSTR,
-    IMPORT_EMAIL_STR,
-    IMPORT_IPV4ADDRESS,
-    IMPORT_IPV4NETWORKS,
-    IMPORT_IPV6ADDRESS,
-    IMPORT_IPV6NETWORKS,
-    IMPORT_NEGATIVE_FLOAT,
-    IMPORT_NEGATIVE_INT,
-    IMPORT_NON_NEGATIVE_FLOAT,
-    IMPORT_NON_NEGATIVE_INT,
-    IMPORT_NON_POSITIVE_FLOAT,
-    IMPORT_NON_POSITIVE_INT,
-    IMPORT_POSITIVE_FLOAT,
-    IMPORT_POSITIVE_INT,
-    IMPORT_SECRET_STR,
-    IMPORT_STRICT_BOOL,
-    IMPORT_STRICT_BYTES,
-    IMPORT_STRICT_FLOAT,
-    IMPORT_STRICT_INT,
-    IMPORT_STRICT_STR,
-    IMPORT_UUID1,
-    IMPORT_UUID2,
-    IMPORT_UUID3,
-    IMPORT_UUID4,
-    IMPORT_UUID5,
-)
-from datamodel_code_generator.types import DataType, StrictTypes, Types, UnionIntFloat
-from datamodel_code_generator.types import DataTypeManager as _DataTypeManager
-
-
-def type_map_factory(
-    data_type: Type[DataType],
-    strict_types: Sequence[StrictTypes],
-    pattern_key: str,
-    use_pendulum: bool,
-    target_datetime_class: DatetimeClassType,
-) -> Dict[Types, DataType]:
-    data_type_int = data_type(type='int')
-    data_type_float = data_type(type='float')
-    data_type_str = data_type(type='str')
-    result = {
-        Types.integer: data_type_int,
-        Types.int32: data_type_int,
-        Types.int64: data_type_int,
-        Types.number: data_type_float,
-        Types.float: data_type_float,
-        Types.double: data_type_float,
-        Types.decimal: data_type.from_import(IMPORT_DECIMAL),
-        Types.time: data_type.from_import(IMPORT_TIME),
-        Types.string: data_type_str,
-        Types.byte: data_type_str,  # base64 encoded string
-        Types.binary: data_type(type='bytes'),
-        Types.date: data_type.from_import(IMPORT_DATE),
-        Types.date_time: data_type.from_import(IMPORT_DATETIME),
-        Types.timedelta: data_type.from_import(IMPORT_TIMEDELTA),
-        Types.path: data_type.from_import(IMPORT_PATH),
-        Types.password: data_type.from_import(IMPORT_SECRET_STR),
-        Types.email: data_type.from_import(IMPORT_EMAIL_STR),
-        Types.uuid: data_type.from_import(IMPORT_UUID),
-        Types.uuid1: data_type.from_import(IMPORT_UUID1),
-        Types.uuid2: data_type.from_import(IMPORT_UUID2),
-        Types.uuid3: data_type.from_import(IMPORT_UUID3),
-        Types.uuid4: data_type.from_import(IMPORT_UUID4),
-        Types.uuid5: data_type.from_import(IMPORT_UUID5),
-        Types.uri: data_type.from_import(IMPORT_ANYURL),
-        Types.hostname: data_type.from_import(
-            IMPORT_CONSTR,
-            strict=StrictTypes.str in strict_types,
-            # https://github.com/horejsek/python-fastjsonschema/blob/61c6997a8348b8df9b22e029ca2ba35ef441fbb8/fastjsonschema/draft04.py#L31
-            kwargs={
-                pattern_key: r"r'^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]{0,61}[A-Za-z0-9])\Z'",
-                **({'strict': True} if StrictTypes.str in strict_types else {}),
-            },
-        ),
-        Types.ipv4: data_type.from_import(IMPORT_IPV4ADDRESS),
-        Types.ipv6: data_type.from_import(IMPORT_IPV6ADDRESS),
-        Types.ipv4_network: data_type.from_import(IMPORT_IPV4NETWORKS),
-        Types.ipv6_network: data_type.from_import(IMPORT_IPV6NETWORKS),
-        Types.boolean: data_type(type='bool'),
-        Types.object: data_type.from_import(IMPORT_ANY, is_dict=True),
-        Types.null: data_type(type='None'),
-        Types.array: data_type.from_import(IMPORT_ANY, is_list=True),
-        Types.any: data_type.from_import(IMPORT_ANY),
-    }
-    if use_pendulum:
-        result[Types.date] = data_type.from_import(IMPORT_PENDULUM_DATE)
-        result[Types.date_time] = data_type.from_import(IMPORT_PENDULUM_DATETIME)
-        result[Types.time] = data_type.from_import(IMPORT_PENDULUM_TIME)
-        result[Types.timedelta] = data_type.from_import(IMPORT_PENDULUM_DURATION)
-
-    return result
-
-
-def strict_type_map_factory(data_type: Type[DataType]) -> Dict[StrictTypes, DataType]:
-    return {
-        StrictTypes.int: data_type.from_import(IMPORT_STRICT_INT, strict=True),
-        StrictTypes.float: data_type.from_import(IMPORT_STRICT_FLOAT, strict=True),
-        StrictTypes.bytes: data_type.from_import(IMPORT_STRICT_BYTES, strict=True),
-        StrictTypes.bool: data_type.from_import(IMPORT_STRICT_BOOL, strict=True),
-        StrictTypes.str: data_type.from_import(IMPORT_STRICT_STR, strict=True),
-    }
-
-
-number_kwargs: Set[str] = {
-    'exclusiveMinimum',
-    'minimum',
-    'exclusiveMaximum',
-    'maximum',
-    'multipleOf',
-}
-
-string_kwargs: Set[str] = {'minItems', 'maxItems', 'minLength', 'maxLength', 'pattern'}
-
-byes_kwargs: Set[str] = {'minLength', 'maxLength'}
-
-escape_characters = str.maketrans(
-    {
-        "'": r'\'',
-        '\b': r'\b',
-        '\f': r'\f',
-        '\n': r'\n',
-        '\r': r'\r',
-        '\t': r'\t',
-    }
-)
-
-
-class DataTypeManager(_DataTypeManager):
-    PATTERN_KEY: ClassVar[str] = 'regex'
-
-    def __init__(
-        self,
-        python_version: PythonVersion = PythonVersion.PY_38,
-        use_standard_collections: bool = False,
-        use_generic_container_types: bool = False,
-        strict_types: Optional[Sequence[StrictTypes]] = None,
-        use_non_positive_negative_number_constrained_types: bool = False,
-        use_union_operator: bool = False,
-        use_pendulum: bool = False,
-        target_datetime_class: Optional[DatetimeClassType] = None,
-    ):
-        super().__init__(
-            python_version,
-            use_standard_collections,
-            use_generic_container_types,
-            strict_types,
-            use_non_positive_negative_number_constrained_types,
-            use_union_operator,
-            use_pendulum,
-            target_datetime_class,
-        )
-
-        self.type_map: Dict[Types, DataType] = self.type_map_factory(
-            self.data_type,
-            strict_types=self.strict_types,
-            pattern_key=self.PATTERN_KEY,
-            target_datetime_class=target_datetime_class,
-        )
-        self.strict_type_map: Dict[StrictTypes, DataType] = strict_type_map_factory(
-            self.data_type,
-        )
-
-        self.kwargs_schema_to_model: Dict[str, str] = {
-            'exclusiveMinimum': 'gt',
-            'minimum': 'ge',
-            'exclusiveMaximum': 'lt',
-            'maximum': 'le',
-            'multipleOf': 'multiple_of',
-            'minItems': 'min_items',
-            'maxItems': 'max_items',
-            'minLength': 'min_length',
-            'maxLength': 'max_length',
-            'pattern': self.PATTERN_KEY,
-        }
-
-    def type_map_factory(
-        self,
-        data_type: Type[DataType],
-        strict_types: Sequence[StrictTypes],
-        pattern_key: str,
-        target_datetime_class: DatetimeClassType,
-    ) -> Dict[Types, DataType]:
-        return type_map_factory(
-            data_type,
-            strict_types,
-            pattern_key,
-            self.use_pendulum,
-            self.target_datetime_class,
-        )
-
-    def transform_kwargs(
-        self, kwargs: Dict[str, Any], filter_: Set[str]
-    ) -> Dict[str, str]:
-        return {
-            self.kwargs_schema_to_model.get(k, k): v
-            for (k, v) in kwargs.items()
-            if v is not None and k in filter_
-        }
-
-    def get_data_int_type(
-        self,
-        types: Types,
-        **kwargs: Any,
-    ) -> DataType:
-        data_type_kwargs: Dict[str, Any] = self.transform_kwargs(kwargs, number_kwargs)
-        strict = StrictTypes.int in self.strict_types
-        if data_type_kwargs:
-            if not strict:
-                if data_type_kwargs == {'gt': 0}:
-                    return self.data_type.from_import(IMPORT_POSITIVE_INT)
-                if data_type_kwargs == {'lt': 0}:
-                    return self.data_type.from_import(IMPORT_NEGATIVE_INT)
-                if (
-                    data_type_kwargs == {'ge': 0}
-                    and self.use_non_positive_negative_number_constrained_types
-                ):
-                    return self.data_type.from_import(IMPORT_NON_NEGATIVE_INT)
-                if (
-                    data_type_kwargs == {'le': 0}
-                    and self.use_non_positive_negative_number_constrained_types
-                ):
-                    return self.data_type.from_import(IMPORT_NON_POSITIVE_INT)
-            kwargs = {k: int(v) for k, v in data_type_kwargs.items()}
-            if strict:
-                kwargs['strict'] = True
-            return self.data_type.from_import(IMPORT_CONINT, kwargs=kwargs)
-        if strict:
-            return self.strict_type_map[StrictTypes.int]
-        return self.type_map[types]
-
-    def get_data_float_type(
-        self,
-        types: Types,
-        **kwargs: Any,
-    ) -> DataType:
-        data_type_kwargs = self.transform_kwargs(kwargs, number_kwargs)
-        strict = StrictTypes.float in self.strict_types
-        if data_type_kwargs:
-            if not strict:
-                if data_type_kwargs == {'gt': 0}:
-                    return self.data_type.from_import(IMPORT_POSITIVE_FLOAT)
-                if data_type_kwargs == {'lt': 0}:
-                    return self.data_type.from_import(IMPORT_NEGATIVE_FLOAT)
-                if (
-                    data_type_kwargs == {'ge': 0}
-                    and self.use_non_positive_negative_number_constrained_types
-                ):
-                    return self.data_type.from_import(IMPORT_NON_NEGATIVE_FLOAT)
-                if (
-                    data_type_kwargs == {'le': 0}
-                    and self.use_non_positive_negative_number_constrained_types
-                ):
-                    return self.data_type.from_import(IMPORT_NON_POSITIVE_FLOAT)
-            kwargs = {k: float(v) for k, v in data_type_kwargs.items()}
-            if strict:
-                kwargs['strict'] = True
-            return self.data_type.from_import(IMPORT_CONFLOAT, kwargs=kwargs)
-        if strict:
-            return self.strict_type_map[StrictTypes.float]
-        return self.type_map[types]
-
-    def get_data_decimal_type(self, types: Types, **kwargs: Any) -> DataType:
-        data_type_kwargs = self.transform_kwargs(kwargs, number_kwargs)
-        if data_type_kwargs:
-            return self.data_type.from_import(
-                IMPORT_CONDECIMAL,
-                kwargs={
-                    k: Decimal(str(v) if isinstance(v, UnionIntFloat) else v)
-                    for k, v in data_type_kwargs.items()
-                },
-            )
-        return self.type_map[types]
-
-    def get_data_str_type(self, types: Types, **kwargs: Any) -> DataType:
-        data_type_kwargs: Dict[str, Any] = self.transform_kwargs(kwargs, string_kwargs)
-        strict = StrictTypes.str in self.strict_types
-        if data_type_kwargs:
-            if strict:
-                data_type_kwargs['strict'] = True
-            if self.PATTERN_KEY in data_type_kwargs:
-                escaped_regex = data_type_kwargs[self.PATTERN_KEY].translate(
-                    escape_characters
-                )
-                # TODO: remove unneeded escaped characters
-                data_type_kwargs[self.PATTERN_KEY] = f"r'{escaped_regex}'"
-            return self.data_type.from_import(IMPORT_CONSTR, kwargs=data_type_kwargs)
-        if strict:
-            return self.strict_type_map[StrictTypes.str]
-        return self.type_map[types]
-
-    def get_data_bytes_type(self, types: Types, **kwargs: Any) -> DataType:
-        data_type_kwargs: Dict[str, Any] = self.transform_kwargs(kwargs, byes_kwargs)
-        strict = StrictTypes.bytes in self.strict_types
-        if data_type_kwargs:
-            if not strict:
-                return self.data_type.from_import(
-                    IMPORT_CONBYTES, kwargs=data_type_kwargs
-                )
-        # conbytes doesn't accept strict argument
-        # https://github.com/samuelcolvin/pydantic/issues/2489
-        #    if strict:
-        #         data_type_kwargs['strict'] = True
-        #     return self.data_type.from_import(IMPORT_CONBYTES, kwargs=data_type_kwargs)
-        if strict:
-            return self.strict_type_map[StrictTypes.bytes]
-        return self.type_map[types]
-
-    def get_data_type(
-        self,
-        types: Types,
-        **kwargs: Any,
-    ) -> DataType:
-        if types == Types.string:
-            return self.get_data_str_type(types, **kwargs)
-        elif types in (Types.int32, Types.int64, Types.integer):
-            return self.get_data_int_type(types, **kwargs)
-        elif types in (Types.float, Types.double, Types.number, Types.time):
-            return self.get_data_float_type(types, **kwargs)
-        elif types == Types.decimal:
-            return self.get_data_decimal_type(types, **kwargs)
-        elif types == Types.binary:
-            return self.get_data_bytes_type(types, **kwargs)
-        elif types == Types.boolean:
-            if StrictTypes.bool in self.strict_types:
-                return self.strict_type_map[StrictTypes.bool]
-
-        return self.type_map[types]
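The integer path in get_data_int_type above special-cases lone bounds before falling back to conint: an exclusiveMinimum of 0 alone becomes PositiveInt, a lone exclusiveMaximum of 0 becomes NegativeInt, and anything else a conint(...) call. A sketch of both outcomes (field names hypothetical):

from pydantic import BaseModel, PositiveInt, conint

class Inventory(BaseModel):
    count: PositiveInt          # {'gt': 0} shortcut
    shelf: conint(ge=1, le=99)  # general case: constrained int
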
diff -pruN 0.26.4-3/datamodel_code_generator/model/pydantic_v2/__init__.py 0.45.0-1/datamodel_code_generator/model/pydantic_v2/__init__.py
--- 0.26.4-3/datamodel_code_generator/model/pydantic_v2/__init__.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.45.0-1/datamodel_code_generator/model/pydantic_v2/__init__.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,36 +0,0 @@
-from __future__ import annotations
-
-from typing import Iterable, Optional, Tuple
-
-from pydantic import BaseModel as _BaseModel
-
-from .base_model import BaseModel, DataModelField, UnionMode
-from .root_model import RootModel
-from .types import DataTypeManager
-
-
-def dump_resolve_reference_action(class_names: Iterable[str]) -> str:
-    return '\n'.join(f'{class_name}.model_rebuild()' for class_name in class_names)
-
-
-class ConfigDict(_BaseModel):
-    extra: Optional[str] = None
-    title: Optional[str] = None
-    populate_by_name: Optional[bool] = None
-    allow_extra_fields: Optional[bool] = None
-    from_attributes: Optional[bool] = None
-    frozen: Optional[bool] = None
-    arbitrary_types_allowed: Optional[bool] = None
-    protected_namespaces: Optional[Tuple[str, ...]] = None
-    regex_engine: Optional[str] = None
-    use_enum_values: Optional[bool] = None
-
-
-__all__ = [
-    'BaseModel',
-    'DataModelField',
-    'RootModel',
-    'dump_resolve_reference_action',
-    'DataTypeManager',
-    'UnionMode',
-]
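The v2 counterpart of the forward-reference footer swaps the call: model_rebuild() instead of update_forward_refs():

print(dump_resolve_reference_action(['Pet', 'User']))
# Pet.model_rebuild()
# User.model_rebuild()
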
diff -pruN 0.26.4-3/datamodel_code_generator/model/pydantic_v2/base_model.py 0.45.0-1/datamodel_code_generator/model/pydantic_v2/base_model.py
--- 0.26.4-3/datamodel_code_generator/model/pydantic_v2/base_model.py	2024-12-15 17:25:57.704037200 +0000
+++ 0.45.0-1/datamodel_code_generator/model/pydantic_v2/base_model.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,247 +0,0 @@
-import re
-from enum import Enum
-from pathlib import Path
-from typing import (
-    Any,
-    ClassVar,
-    DefaultDict,
-    Dict,
-    List,
-    NamedTuple,
-    Optional,
-    Set,
-)
-
-from pydantic import Field
-from typing_extensions import Literal
-
-from datamodel_code_generator.model.base import UNDEFINED, DataModelFieldBase
-from datamodel_code_generator.model.pydantic.base_model import (
-    BaseModelBase,
-)
-from datamodel_code_generator.model.pydantic.base_model import (
-    Constraints as _Constraints,
-)
-from datamodel_code_generator.model.pydantic.base_model import (
-    DataModelField as DataModelFieldV1,
-)
-from datamodel_code_generator.model.pydantic_v2.imports import IMPORT_CONFIG_DICT
-from datamodel_code_generator.reference import Reference
-from datamodel_code_generator.util import field_validator, model_validator
-
-
-class UnionMode(Enum):
-    smart = 'smart'
-    left_to_right = 'left_to_right'
-
-
-class Constraints(_Constraints):
-    # To override existing pattern alias
-    regex: Optional[str] = Field(None, alias='regex')
-    pattern: Optional[str] = Field(None, alias='pattern')
-
-    @model_validator(mode='before')
-    def validate_min_max_items(cls, values: Any) -> Dict[str, Any]:
-        if not isinstance(values, dict):  # pragma: no cover
-            return values
-        min_items = values.pop('minItems', None)
-        if min_items is not None:
-            values['minLength'] = min_items
-        max_items = values.pop('maxItems', None)
-        if max_items is not None:
-            values['maxLength'] = max_items
-        return values
-
-
-class DataModelField(DataModelFieldV1):
-    _EXCLUDE_FIELD_KEYS: ClassVar[Set[str]] = {
-        'alias',
-        'default',
-        'gt',
-        'ge',
-        'lt',
-        'le',
-        'multiple_of',
-        'min_length',
-        'max_length',
-        'pattern',
-    }
-    _DEFAULT_FIELD_KEYS: ClassVar[Set[str]] = {
-        'default',
-        'default_factory',
-        'alias',
-        'alias_priority',
-        'validation_alias',
-        'serialization_alias',
-        'title',
-        'description',
-        'examples',
-        'exclude',
-        'discriminator',
-        'json_schema_extra',
-        'frozen',
-        'validate_default',
-        'repr',
-        'init_var',
-        'kw_only',
-        'pattern',
-        'strict',
-        'gt',
-        'ge',
-        'lt',
-        'le',
-        'multiple_of',
-        'allow_inf_nan',
-        'max_digits',
-        'decimal_places',
-        'min_length',
-        'max_length',
-        'union_mode',
-    }
-    constraints: Optional[Constraints] = None
-    _PARSE_METHOD: ClassVar[str] = 'model_validate'
-    can_have_extra_keys: ClassVar[bool] = False
-
-    @field_validator('extras')
-    def validate_extras(cls, values: Any) -> Dict[str, Any]:
-        if not isinstance(values, dict):  # pragma: no cover
-            return values
-        if 'examples' in values:
-            return values
-
-        if 'example' in values:
-            values['examples'] = [values.pop('example')]
-        return values
-
-    def process_const(self) -> None:
-        if 'const' not in self.extras:
-            return None
-        self.const = True
-        self.nullable = False
-        const = self.extras['const']
-        self.data_type = self.data_type.__class__(literals=[const])
-        if not self.default:
-            self.default = const
-
-    def _process_data_in_str(self, data: Dict[str, Any]) -> None:
-        if self.const:
-            # const is removed in pydantic 2.0
-            data.pop('const')
-
-        # unique_items is not supported in pydantic 2.0
-        data.pop('unique_items', None)
-
-        if 'union_mode' in data:
-            if self.data_type.is_union:
-                data['union_mode'] = data.pop('union_mode').value
-            else:
-                data.pop('union_mode')
-
-        # **extra is not supported in pydantic 2.0
-        json_schema_extra = {
-            k: v for k, v in data.items() if k not in self._DEFAULT_FIELD_KEYS
-        }
-        if json_schema_extra:
-            data['json_schema_extra'] = json_schema_extra
-            for key in json_schema_extra.keys():
-                data.pop(key)
-
-    def _process_annotated_field_arguments(
-        self,
-        field_arguments: List[str],
-    ) -> List[str]:
-        return field_arguments
-
-
-class ConfigAttribute(NamedTuple):
-    from_: str
-    to: str
-    invert: bool
-
-
-class BaseModel(BaseModelBase):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'pydantic_v2/BaseModel.jinja2'
-    BASE_CLASS: ClassVar[str] = 'pydantic.BaseModel'
-    CONFIG_ATTRIBUTES: ClassVar[List[ConfigAttribute]] = [
-        ConfigAttribute('allow_population_by_field_name', 'populate_by_name', False),
-        ConfigAttribute('populate_by_name', 'populate_by_name', False),
-        ConfigAttribute('allow_mutation', 'frozen', True),
-        ConfigAttribute('frozen', 'frozen', False),
-    ]
-
-    def __init__(
-        self,
-        *,
-        reference: Reference,
-        fields: List[DataModelFieldBase],
-        decorators: Optional[List[str]] = None,
-        base_classes: Optional[List[Reference]] = None,
-        custom_base_class: Optional[str] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Any]] = None,
-        path: Optional[Path] = None,
-        description: Optional[str] = None,
-        default: Any = UNDEFINED,
-        nullable: bool = False,
-        keyword_only: bool = False,
-    ) -> None:
-        super().__init__(
-            reference=reference,
-            fields=fields,
-            decorators=decorators,
-            base_classes=base_classes,
-            custom_base_class=custom_base_class,
-            custom_template_dir=custom_template_dir,
-            extra_template_data=extra_template_data,
-            path=path,
-            description=description,
-            default=default,
-            nullable=nullable,
-            keyword_only=keyword_only,
-        )
-        config_parameters: Dict[str, Any] = {}
-
-        extra = self._get_config_extra()
-        if extra:
-            config_parameters['extra'] = extra
-
-        for from_, to, invert in self.CONFIG_ATTRIBUTES:
-            if from_ in self.extra_template_data:
-                config_parameters[to] = (
-                    not self.extra_template_data[from_]
-                    if invert
-                    else self.extra_template_data[from_]
-                )
-        for data_type in self.all_data_types:
-            if data_type.is_custom_type:  # pragma: no cover
-                config_parameters['arbitrary_types_allowed'] = True
-                break
-
-        for field in self.fields:
-            # Check if a regex pattern uses lookarounds.
-            # Depending on the generation configuration, the pattern may end up in two different places.
-            pattern = (
-                isinstance(field.constraints, Constraints) and field.constraints.pattern
-            ) or (field.data_type.kwargs or {}).get('pattern')
-            if pattern and re.search(r'\(\?<?[=!]', pattern):
-                config_parameters['regex_engine'] = '"python-re"'
-                break
-
-        if isinstance(self.extra_template_data.get('config'), dict):
-            for key, value in self.extra_template_data['config'].items():
-                config_parameters[key] = value
-
-        if config_parameters:
-            from datamodel_code_generator.model.pydantic_v2 import ConfigDict
-
-            self.extra_template_data['config'] = ConfigDict.parse_obj(config_parameters)
-            self._additional_imports.append(IMPORT_CONFIG_DICT)
-
-    def _get_config_extra(self) -> Optional[Literal["'allow'", "'forbid'"]]:
-        additionalProperties = self.extra_template_data.get('additionalProperties')
-        allow_extra_fields = self.extra_template_data.get('allow_extra_fields')
-        if additionalProperties is not None or allow_extra_fields:
-            return (
-                "'allow'" if additionalProperties or allow_extra_fields else "'forbid'"
-            )
-        return None
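
Editor's note: the lookaround check in `__init__` above is what switches a generated model to Python's `re` engine; Pydantic V2 validates patterns with a Rust regex engine by default, which rejects lookaheads and lookbehinds. A minimal sketch of the detection and the resulting rendered config (the pattern strings are illustrative):

    import re

    _LOOKAROUND = re.compile(r'\(\?<?[=!]')    # matches (?= (?! (?<= (?<!

    assert _LOOKAROUND.search('(?=abc)')       # lookahead -> python-re needed
    assert _LOOKAROUND.search('(?<!x)y')       # negative lookbehind
    assert not _LOOKAROUND.search('^[a-z]+$')  # plain pattern -> default engine

    # Via ConfigDict.jinja2 the model then carries:
    #     model_config = ConfigDict(regex_engine="python-re")
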
diff -pruN 0.26.4-3/datamodel_code_generator/model/pydantic_v2/imports.py 0.45.0-1/datamodel_code_generator/model/pydantic_v2/imports.py
--- 0.26.4-3/datamodel_code_generator/model/pydantic_v2/imports.py	2024-12-15 17:25:57.705037000 +0000
+++ 0.45.0-1/datamodel_code_generator/model/pydantic_v2/imports.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,5 +0,0 @@
-from datamodel_code_generator.imports import Import
-
-IMPORT_CONFIG_DICT = Import.from_full_path('pydantic.ConfigDict')
-IMPORT_AWARE_DATETIME = Import.from_full_path('pydantic.AwareDatetime')
-IMPORT_NAIVE_DATETIME = Import.from_full_path('pydantic.NaiveDatetime')
diff -pruN 0.26.4-3/datamodel_code_generator/model/pydantic_v2/root_model.py 0.45.0-1/datamodel_code_generator/model/pydantic_v2/root_model.py
--- 0.26.4-3/datamodel_code_generator/model/pydantic_v2/root_model.py	2024-12-15 17:25:57.705037000 +0000
+++ 0.45.0-1/datamodel_code_generator/model/pydantic_v2/root_model.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,25 +0,0 @@
-from __future__ import annotations
-
-from typing import Any, ClassVar, Literal, Optional
-
-from datamodel_code_generator.model.pydantic_v2.base_model import BaseModel
-
-
-class RootModel(BaseModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'pydantic_v2/RootModel.jinja2'
-    BASE_CLASS: ClassVar[str] = 'pydantic.RootModel'
-
-    def __init__(
-        self,
-        **kwargs: Any,
-    ) -> None:
-        # Drop custom_base_class for Pydantic V2 models: unlike Pydantic V1, a custom base class would
-        # not be treated as a root model, and a class cannot implement both BaseModel and RootModel.
-        if 'custom_base_class' in kwargs:
-            kwargs.pop('custom_base_class')
-
-        super().__init__(**kwargs)
-
-    def _get_config_extra(self) -> Optional[Literal["'allow'", "'forbid'"]]:
-        # Pydantic V2 RootModels cannot have extra fields
-        return None
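
Editor's note: for reference, a hand-written sketch of the kind of class the Pydantic V2 RootModel template renders (the `Tags` name is illustrative). A `custom_base_class` would break the generic `RootModel[...]` parametrization, which is why the override above discards it:

    from typing import List
    from pydantic import RootModel

    class Tags(RootModel[List[str]]):
        root: List[str]

    tags = Tags(['a', 'b'])
    assert tags.root == ['a', 'b']
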
diff -pruN 0.26.4-3/datamodel_code_generator/model/pydantic_v2/types.py 0.45.0-1/datamodel_code_generator/model/pydantic_v2/types.py
--- 0.26.4-3/datamodel_code_generator/model/pydantic_v2/types.py	2024-12-15 17:25:57.705037000 +0000
+++ 0.45.0-1/datamodel_code_generator/model/pydantic_v2/types.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,43 +0,0 @@
-from __future__ import annotations
-
-from typing import ClassVar, Dict, Optional, Sequence, Type
-
-from datamodel_code_generator.format import DatetimeClassType
-from datamodel_code_generator.model.pydantic import DataTypeManager as _DataTypeManager
-from datamodel_code_generator.model.pydantic.imports import IMPORT_CONSTR
-from datamodel_code_generator.model.pydantic_v2.imports import (
-    IMPORT_AWARE_DATETIME,
-    IMPORT_NAIVE_DATETIME,
-)
-from datamodel_code_generator.types import DataType, StrictTypes, Types
-
-
-class DataTypeManager(_DataTypeManager):
-    PATTERN_KEY: ClassVar[str] = 'pattern'
-
-    def type_map_factory(
-        self,
-        data_type: Type[DataType],
-        strict_types: Sequence[StrictTypes],
-        pattern_key: str,
-        target_datetime_class: Optional[DatetimeClassType] = None,
-    ) -> Dict[Types, DataType]:
-        result = {
-            **super().type_map_factory(
-                data_type, strict_types, pattern_key, target_datetime_class
-            ),
-            Types.hostname: self.data_type.from_import(
-                IMPORT_CONSTR,
-                strict=StrictTypes.str in strict_types,
-                # https://github.com/horejsek/python-fastjsonschema/blob/61c6997a8348b8df9b22e029ca2ba35ef441fbb8/fastjsonschema/draft04.py#L31
-                kwargs={
-                    pattern_key: r"r'^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]{0,61}[A-Za-z0-9])$'",
-                    **({'strict': True} if StrictTypes.str in strict_types else {}),
-                },
-            ),
-        }
-        if target_datetime_class == DatetimeClassType.Awaredatetime:
-            result[Types.date_time] = data_type.from_import(IMPORT_AWARE_DATETIME)
-        if target_datetime_class == DatetimeClassType.Naivedatetime:
-            result[Types.date_time] = data_type.from_import(IMPORT_NAIVE_DATETIME)
-        return result
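
Editor's note: a minimal sketch of what the `hostname` mapping above produces in a generated model (model and field names are illustrative; the pattern is the fastjsonschema draft-04 hostname regex cited in the comment):

    from pydantic import BaseModel, constr

    _HOSTNAME = r'^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]{0,61}[A-Za-z0-9])$'

    class Server(BaseModel):
        host: constr(pattern=_HOSTNAME)

    Server(host='example.com')  # passes validation
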
diff -pruN 0.26.4-3/datamodel_code_generator/model/rootmodel.py 0.45.0-1/datamodel_code_generator/model/rootmodel.py
--- 0.26.4-3/datamodel_code_generator/model/rootmodel.py	2024-12-15 17:25:57.705037000 +0000
+++ 0.45.0-1/datamodel_code_generator/model/rootmodel.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,9 +0,0 @@
-from __future__ import annotations
-
-from typing import ClassVar
-
-from datamodel_code_generator.model import DataModel
-
-
-class RootModel(DataModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'root.jinja2'
diff -pruN 0.26.4-3/datamodel_code_generator/model/scalar.py 0.45.0-1/datamodel_code_generator/model/scalar.py
--- 0.26.4-3/datamodel_code_generator/model/scalar.py	2024-12-15 17:25:57.705037000 +0000
+++ 0.45.0-1/datamodel_code_generator/model/scalar.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,80 +0,0 @@
-from __future__ import annotations
-
-from collections import defaultdict
-from pathlib import Path
-from typing import Any, ClassVar, DefaultDict, Dict, List, Optional, Tuple
-
-from datamodel_code_generator.imports import IMPORT_TYPE_ALIAS, Import
-from datamodel_code_generator.model import DataModel, DataModelFieldBase
-from datamodel_code_generator.model.base import UNDEFINED
-from datamodel_code_generator.reference import Reference
-
-_INT: str = 'int'
-_FLOAT: str = 'float'
-_BOOLEAN: str = 'bool'
-_STR: str = 'str'
-
-# default GraphQL scalar types and the fallback for unknown scalars
-DEFAULT_GRAPHQL_SCALAR_TYPE = _STR
-
-DEFAULT_GRAPHQL_SCALAR_TYPES: Dict[str, str] = {
-    'Boolean': _BOOLEAN,
-    'String': _STR,
-    'ID': _STR,
-    'Int': _INT,
-    'Float': _FLOAT,
-}
-
-
-class DataTypeScalar(DataModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'Scalar.jinja2'
-    BASE_CLASS: ClassVar[str] = ''
-    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_TYPE_ALIAS,)
-
-    def __init__(
-        self,
-        *,
-        reference: Reference,
-        fields: List[DataModelFieldBase],
-        decorators: Optional[List[str]] = None,
-        base_classes: Optional[List[Reference]] = None,
-        custom_base_class: Optional[str] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-        methods: Optional[List[str]] = None,
-        path: Optional[Path] = None,
-        description: Optional[str] = None,
-        default: Any = UNDEFINED,
-        nullable: bool = False,
-        keyword_only: bool = False,
-    ):
-        extra_template_data = extra_template_data or defaultdict(dict)
-
-        scalar_name = reference.name
-        if scalar_name not in extra_template_data:
-            extra_template_data[scalar_name] = defaultdict(dict)
-
-        # py_type
-        py_type = extra_template_data[scalar_name].get(
-            'py_type',
-            DEFAULT_GRAPHQL_SCALAR_TYPES.get(
-                reference.name, DEFAULT_GRAPHQL_SCALAR_TYPE
-            ),
-        )
-        extra_template_data[scalar_name]['py_type'] = py_type
-
-        super().__init__(
-            reference=reference,
-            fields=fields,
-            decorators=decorators,
-            base_classes=base_classes,
-            custom_base_class=custom_base_class,
-            custom_template_dir=custom_template_dir,
-            extra_template_data=extra_template_data,
-            methods=methods,
-            path=path,
-            description=description,
-            default=default,
-            nullable=nullable,
-            keyword_only=keyword_only,
-        )
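
Editor's note: together with Scalar.jinja2 below, this model turns each GraphQL scalar into a plain type alias, with unknown scalars falling back to `str`. An illustrative rendering (`DateTime` stands in for a hypothetical custom scalar):

    from typing import TypeAlias  # typing_extensions on Python < 3.10

    ID: TypeAlias = str        # built-in scalar, mapped above
    Float: TypeAlias = float   # built-in scalar
    DateTime: TypeAlias = str  # custom scalar -> DEFAULT_GRAPHQL_SCALAR_TYPE
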
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/Enum.jinja2 0.45.0-1/datamodel_code_generator/model/template/Enum.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/Enum.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.45.0-1/datamodel_code_generator/model/template/Enum.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,17 +0,0 @@
-{% for decorator in decorators -%}
-{{ decorator }}
-{% endfor -%}
-class {{ class_name }}({{ base_class }}):
-{%- if description %}
-    """
-    {{ description | indent(4) }}
-    """
-{%- endif %}
-{%- for field in fields %}
-    {{ field.name }} = {{ field.default }}
-    {%- if field.docstring %}
-    """
-    {{ field.docstring | indent(4) }}
-    """
-    {%- endif %}
-{%- endfor -%}
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/Scalar.jinja2 0.45.0-1/datamodel_code_generator/model/template/Scalar.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/Scalar.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.45.0-1/datamodel_code_generator/model/template/Scalar.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,6 +0,0 @@
-{{ class_name }}: TypeAlias = {{ py_type }}
-{%- if description %}
-"""
-{{ description }}
-"""
-{%- endif %}
\ No newline at end of file
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/TypedDict.jinja2 0.45.0-1/datamodel_code_generator/model/template/TypedDict.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/TypedDict.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.45.0-1/datamodel_code_generator/model/template/TypedDict.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,5 +0,0 @@
-{%- if is_functional_syntax %}
-{% include 'TypedDictFunction.jinja2' %}
-{%- else %}
-{% include 'TypedDictClass.jinja2' %}
-{%- endif %}
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/TypedDictClass.jinja2 0.45.0-1/datamodel_code_generator/model/template/TypedDictClass.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/TypedDictClass.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.45.0-1/datamodel_code_generator/model/template/TypedDictClass.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,17 +0,0 @@
-class {{ class_name }}({{ base_class }}):
-{%- if description %}
-    """
-    {{ description | indent(4) }}
-    """
-{%- endif %}
-{%- if not fields and not description %}
-    pass
-{%- endif %}
-{%- for field in fields %}
-    {{ field.name }}: {{ field.type_hint }}
-    {%- if field.docstring %}
-    """
-    {{ field.docstring | indent(4) }}
-    """
-    {%- endif %}
-{%- endfor -%}
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/TypedDictFunction.jinja2 0.45.0-1/datamodel_code_generator/model/template/TypedDictFunction.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/TypedDictFunction.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.45.0-1/datamodel_code_generator/model/template/TypedDictFunction.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,16 +0,0 @@
-{%- if description %}
-"""
-{{ description | indent(4) }}
-"""
-{%- endif %}
-{{ class_name }} = TypedDict('{{ class_name }}', {
-{%- for field in all_fields %}
-    '{{ field.key }}': {{ field.type_hint }},
-    {%- if field.docstring %}
-    """
-    {{ field.docstring | indent(4) }}
-    """
-    {%- endif %}
-{%- endfor -%}
-})
-
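Editor's note: these two templates are the output sides of `TypedDict.is_functional_syntax` (defined in typed_dict.py further down): any field name that is not a valid Python identifier forces the functional form. A sketch of both renderings (names illustrative):

    from typing import TypedDict

    class Point(TypedDict):  # class syntax: all keys are identifiers
        x: int
        y: int

    # functional syntax: 'content-type' is not a valid identifier
    Response = TypedDict('Response', {
        'content-type': str,
        'status': int,
    })
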
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/Union.jinja2 0.45.0-1/datamodel_code_generator/model/template/Union.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/Union.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.45.0-1/datamodel_code_generator/model/template/Union.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,10 +0,0 @@
-{%- if description %}
-# {{ description }}
-{%- endif %}
-{%- if fields|length > 1 %}
-{{ class_name }}: TypeAlias = Union[
-{%- for field in fields %}
-    '{{ field.name }}',
-{%- endfor %}
-]{% else %}
-{{ class_name }}: TypeAlias = {{ fields[0].name }}{% endif %}
\ No newline at end of file
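
Editor's note: the member names are rendered as quoted forward references, so the alias stays valid even when the member classes are defined later in the module. An illustrative rendering (class names hypothetical):

    from typing import TypeAlias, Union

    Pet: TypeAlias = Union[
        'Cat',
        'Dog',
    ]
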
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/dataclass.jinja2 0.45.0-1/datamodel_code_generator/model/template/dataclass.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/dataclass.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.45.0-1/datamodel_code_generator/model/template/dataclass.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,32 +0,0 @@
-{% for decorator in decorators -%}
-{{ decorator }}
-{% endfor -%}
-@dataclass{%- if keyword_only -%}(kw_only=True){%- endif %}
-{%- if base_class %}
-class {{ class_name }}({{ base_class }}):
-{%- else %}
-class {{ class_name }}:
-{%- endif %}
-{%- if description %}
-    """
-    {{ description | indent(4) }}
-    """
-{%- endif %}
-{%- if not fields and not description %}
-    pass
-{%- endif %}
-{%- for field in fields -%}
-    {%- if field.field %}
-    {{ field.name }}: {{ field.type_hint }} = {{ field.field }}
-    {%- else %}
-    {{ field.name }}: {{ field.type_hint }}
-    {%- if not (field.required or (field.represented_default == 'None' and field.strip_default_none))
-            %} = {{ field.represented_default }}
-    {%- endif -%}
-    {%- endif %}
-    {%- if field.docstring %}
-    """
-    {{ field.docstring | indent(4) }}
-    """
-    {%- endif %}
-{%- endfor -%}
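
Editor's note: a sketch of what this template renders when `keyword_only` is set (names illustrative); note that `kw_only` requires Python 3.10+:

    from dataclasses import dataclass

    @dataclass(kw_only=True)
    class User:
        name: str
        age: int = 0

    User(name='alice')  # positional arguments are rejected
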
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/msgspec.jinja2 0.45.0-1/datamodel_code_generator/model/template/msgspec.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/msgspec.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.45.0-1/datamodel_code_generator/model/template/msgspec.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,42 +0,0 @@
-{% for decorator in decorators -%}
-{{ decorator }}
-{% endfor -%}
-{%- if base_class %}
-class {{ class_name }}({{ base_class }}{%- for key, value in (base_class_kwargs|default({})).items() -%}
-, {{ key }}={{ value }}
-{%- endfor -%}):
-{%- else %}
-class {{ class_name }}:
-{%- endif %}
-{%- if description %}
-    """
-    {{ description | indent(4) }}
-    """
-{%- endif %}
-{%- if not fields and not description %}
-    pass
-{%- endif %}
-{%- for field in fields -%}
-    {%- if not field.annotated and field.field %}
-    {{ field.name }}: {{ field.type_hint }} = {{ field.field }}
-    {%- else %}
-    {%- if field.annotated and not field.field %}
-    {{ field.name }}: {{ field.annotated }}
-    {%- elif field.annotated and field.field %}
-    {{ field.name }}: {{ field.annotated }} = {{ field.field }}
-    {%- else %}
-    {{ field.name }}: {{ field.type_hint }}
-    {%- endif %}
-    {%- if not field.field and (not field.required or field.data_type.is_optional or field.nullable)
-            %} = {{ field.represented_default }}
-    {%- endif -%}
-    {%- endif %}
-
-
-
-    {%- if field.docstring %}
-    """
-    {{ field.docstring | indent(4) }}
-    """
-    {%- endif %}
-{%- endfor -%}
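
Editor's note: the `base_class_kwargs` loop renders keyword arguments on the class line, which is how msgspec Structs are configured. An illustrative rendering (names hypothetical):

    import msgspec

    class User(msgspec.Struct, kw_only=True):
        name: str
        age: int = 0
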
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/pydantic/BaseModel.jinja2 0.45.0-1/datamodel_code_generator/model/template/pydantic/BaseModel.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/pydantic/BaseModel.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.45.0-1/datamodel_code_generator/model/template/pydantic/BaseModel.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,39 +0,0 @@
-{% for decorator in decorators -%}
-{{ decorator }}
-{% endfor -%}
-class {{ class_name }}({{ base_class }}):{% if comment is defined %}  # {{ comment }}{% endif %}
-{%- if description %}
-    """
-    {{ description | indent(4) }}
-    """
-{%- endif %}
-{%- if not fields and not description %}
-    pass
-{%- endif %}
-{%- if config %}
-{%- filter indent(4) %}
-{% include 'Config.jinja2' %}
-{%- endfilter %}
-{%- endif %}
-{%- for field in fields -%}
-    {%- if not field.annotated and field.field %}
-    {{ field.name }}: {{ field.type_hint }} = {{ field.field }}
-    {%- else %}
-    {%- if field.annotated %}
-    {{ field.name }}: {{ field.annotated }}
-    {%- else %}
-    {{ field.name }}: {{ field.type_hint }}
-    {%- endif %}
-    {%- if not (field.required or (field.represented_default == 'None' and field.strip_default_none))
-            %} = {{ field.represented_default }}
-    {%- endif -%}
-    {%- endif %}
-    {%- if field.docstring %}
-    """
-    {{ field.docstring | indent(4) }}
-    """
-    {%- endif %}
-{%- for method in methods -%}
-    {{ method }}
-{%- endfor -%}
-{%- endfor -%}
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2 0.45.0-1/datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.45.0-1/datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,36 +0,0 @@
-{% for decorator in decorators -%}
-{{ decorator }}
-{% endfor -%}
-class {{ class_name }}({{ base_class }}):{% if comment is defined %}  # {{ comment }}{% endif %}
-{%- if description %}
-    """
-    {{ description | indent(4) }}
-    """
-{%- endif %}
-{%- if config %}
-{%- filter indent(4) %}
-{% include 'Config.jinja2' %}
-{%- endfilter %}
-{%- endif %}
-{%- if not fields and not description %}
-    pass
-{%- else %}
-    {%- set field = fields[0] %}
-    {%- if not field.annotated and field.field %}
-    __root__: {{ field.type_hint }} = {{ field.field }}
-    {%- else %}
-    {%- if field.annotated %}
-    __root__: {{ field.annotated }}
-    {%- else %}
-    __root__: {{ field.type_hint }}
-    {%- endif %}
-    {%- if not (field.required or (field.represented_default == 'None' and field.strip_default_none))
-            %} = {{ field.represented_default }}
-    {%- endif -%}
-    {%- endif %}
-    {%- if field.docstring %}
-    """
-    {{ field.docstring | indent(4) }}
-    """
-    {%- endif %}
-{%- endif %}
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/pydantic/Config.jinja2 0.45.0-1/datamodel_code_generator/model/template/pydantic/Config.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/pydantic/Config.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.45.0-1/datamodel_code_generator/model/template/pydantic/Config.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,4 +0,0 @@
-class Config:
-{%- for field_name, value in config.dict(exclude_unset=True).items() %}
-    {{ field_name }} = {{ value }}
-{%- endfor %}
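
Editor's note: this is the Pydantic V1 counterpart of ConfigDict.jinja2 below; the same `config` object renders as a nested class here and as a `model_config` assignment there. A side-by-side sketch (option values illustrative):

    # Pydantic V1 (Config.jinja2)
    class Config:
        allow_population_by_field_name = True

    # Pydantic V2 (ConfigDict.jinja2)
    model_config = ConfigDict(populate_by_name=True)
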
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/pydantic/dataclass.jinja2 0.45.0-1/datamodel_code_generator/model/template/pydantic/dataclass.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/pydantic/dataclass.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.45.0-1/datamodel_code_generator/model/template/pydantic/dataclass.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,29 +0,0 @@
-{% for decorator in decorators -%}
-{{ decorator }}
-{% endfor -%}
-@dataclass
-{%- if base_class %}
-class {{ class_name }}({{ base_class }}):
-{%- else %}
-class {{ class_name }}:
-{%- endif %}
-{%- if description %}
-    """
-    {{ description | indent(4) }}
-    """
-{%- endif %}
-{%- if not fields %}
-    pass
-{%- endif %}
-{%- for field in fields -%}
-    {%- if field.default %}
-    {{ field.name }}: {{ field.type_hint }} = {{field.default}}
-    {%- else %}
-    {{ field.name }}: {{ field.type_hint }}
-    {%- endif %}
-    {%- if field.docstring %}
-    """
-    {{ field.docstring | indent(4) }}
-    """
-    {%- endif %}
-{%- endfor -%}
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2 0.45.0-1/datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.45.0-1/datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,39 +0,0 @@
-{% for decorator in decorators -%}
-{{ decorator }}
-{% endfor -%}
-class {{ class_name }}({{ base_class }}):{% if comment is defined %}  # {{ comment }}{% endif %}
-{%- if description %}
-    """
-    {{ description | indent(4) }}
-    """
-{%- endif %}
-{%- if not fields and not description %}
-    pass
-{%- endif %}
-{%- if config %}
-{%- filter indent(4) %}
-{% include 'ConfigDict.jinja2' %}
-{%- endfilter %}
-{%- endif %}
-{%- for field in fields -%}
-    {%- if not field.annotated and field.field %}
-    {{ field.name }}: {{ field.type_hint }} = {{ field.field }}
-    {%- else %}
-    {%- if field.annotated %}
-    {{ field.name }}: {{ field.annotated }}
-    {%- else %}
-    {{ field.name }}: {{ field.type_hint }}
-    {%- endif %}
-    {%- if not (field.required or (field.represented_default == 'None' and field.strip_default_none)) or field.data_type.is_optional
-            %} = {{ field.represented_default }}
-    {%- endif -%}
-    {%- endif %}
-    {%- if field.docstring %}
-    """
-    {{ field.docstring | indent(4) }}
-    """
-    {%- endif %}
-{%- for method in methods -%}
-    {{ method }}
-{%- endfor -%}
-{%- endfor -%}
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2 0.45.0-1/datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.45.0-1/datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,5 +0,0 @@
-model_config = ConfigDict(
-{%- for field_name, value in config.dict(exclude_unset=True).items() %}
-    {{ field_name }}={{ value }},
-{%- endfor %}
-)
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2 0.45.0-1/datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.45.0-1/datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,45 +0,0 @@
-{%- macro get_type_hint(_fields) -%}
-{%- if _fields -%}
-{#There will only ever be a single field for RootModel#}
-{{- _fields[0].type_hint}}
-{%- endif -%}
-{%- endmacro -%}
-
-
-{% for decorator in decorators -%}
-{{ decorator }}
-{% endfor -%}
-
-class {{ class_name }}({{ base_class }}{%- if fields -%}[{{get_type_hint(fields)}}]{%- endif -%}):{% if comment is defined %}  # {{ comment }}{% endif %}
-{%- if description %}
-    """
-    {{ description | indent(4) }}
-    """
-{%- endif %}
-{%- if config %}
-{%- filter indent(4) %}
-{% include 'ConfigDict.jinja2' %}
-{%- endfilter %}
-{%- endif %}
-{%- if not fields and not description %}
-    pass
-{%- else %}
-    {%- set field = fields[0] %}
-    {%- if not field.annotated and field.field %}
-    root: {{ field.type_hint }} = {{ field.field }}
-    {%- else %}
-    {%- if field.annotated %}
-    root: {{ field.annotated }}
-    {%- else %}
-    root: {{ field.type_hint }}
-    {%- endif %}
-    {%- if not (field.required or (field.represented_default == 'None' and field.strip_default_none))
-            %} = {{ field.represented_default }}
-    {%- endif -%}
-    {%- endif %}
-    {%- if field.docstring %}
-    """
-    {{ field.docstring | indent(4) }}
-    """
-    {%- endif %}
-{%- endif %}
diff -pruN 0.26.4-3/datamodel_code_generator/model/template/root.jinja2 0.45.0-1/datamodel_code_generator/model/template/root.jinja2
--- 0.26.4-3/datamodel_code_generator/model/template/root.jinja2	2024-12-15 17:25:57.705037000 +0000
+++ 0.45.0-1/datamodel_code_generator/model/template/root.jinja2	1970-01-01 00:00:00.000000000 +0000
@@ -1,6 +0,0 @@
-{%- set field = fields[0] %}
-{%- if field.annotated %}
-{{ class_name }} = {{ field.annotated }}
-{%- else %}
-{{ class_name }} = {{ field.type_hint }}
-{%- endif %}
diff -pruN 0.26.4-3/datamodel_code_generator/model/typed_dict.py 0.45.0-1/datamodel_code_generator/model/typed_dict.py
--- 0.26.4-3/datamodel_code_generator/model/typed_dict.py	2024-12-15 17:25:57.705037000 +0000
+++ 0.45.0-1/datamodel_code_generator/model/typed_dict.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,155 +0,0 @@
-import keyword
-from pathlib import Path
-from typing import (
-    Any,
-    ClassVar,
-    DefaultDict,
-    Dict,
-    Iterator,
-    List,
-    Optional,
-    Tuple,
-)
-
-from datamodel_code_generator.imports import Import
-from datamodel_code_generator.model import DataModel, DataModelFieldBase
-from datamodel_code_generator.model.base import UNDEFINED
-from datamodel_code_generator.model.imports import (
-    IMPORT_NOT_REQUIRED,
-    IMPORT_NOT_REQUIRED_BACKPORT,
-    IMPORT_TYPED_DICT,
-    IMPORT_TYPED_DICT_BACKPORT,
-)
-from datamodel_code_generator.reference import Reference
-from datamodel_code_generator.types import NOT_REQUIRED_PREFIX
-
-escape_characters = str.maketrans(
-    {
-        '\\': r'\\',
-        "'": r'\'',
-        '\b': r'\b',
-        '\f': r'\f',
-        '\n': r'\n',
-        '\r': r'\r',
-        '\t': r'\t',
-    }
-)
-
-
-def _is_valid_field_name(field: DataModelFieldBase) -> bool:
-    name = field.original_name or field.name
-    if name is None:  # pragma: no cover
-        return False
-    return name.isidentifier() and not keyword.iskeyword(name)
-
-
-class TypedDict(DataModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'TypedDict.jinja2'
-    BASE_CLASS: ClassVar[str] = 'typing.TypedDict'
-    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_TYPED_DICT,)
-
-    def __init__(
-        self,
-        *,
-        reference: Reference,
-        fields: List[DataModelFieldBase],
-        decorators: Optional[List[str]] = None,
-        base_classes: Optional[List[Reference]] = None,
-        custom_base_class: Optional[str] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-        methods: Optional[List[str]] = None,
-        path: Optional[Path] = None,
-        description: Optional[str] = None,
-        default: Any = UNDEFINED,
-        nullable: bool = False,
-        keyword_only: bool = False,
-    ) -> None:
-        super().__init__(
-            reference=reference,
-            fields=fields,
-            decorators=decorators,
-            base_classes=base_classes,
-            custom_base_class=custom_base_class,
-            custom_template_dir=custom_template_dir,
-            extra_template_data=extra_template_data,
-            methods=methods,
-            path=path,
-            description=description,
-            default=default,
-            nullable=nullable,
-            keyword_only=keyword_only,
-        )
-
-    @property
-    def is_functional_syntax(self) -> bool:
-        return any(not _is_valid_field_name(f) for f in self.fields)
-
-    @property
-    def all_fields(self) -> Iterator[DataModelFieldBase]:
-        for base_class in self.base_classes:
-            if base_class.reference is None:  # pragma: no cover
-                continue
-            data_model = base_class.reference.source
-            if not isinstance(data_model, DataModel):  # pragma: no cover
-                continue
-
-            if isinstance(data_model, TypedDict):  # pragma: no cover
-                yield from data_model.all_fields
-
-        yield from self.fields
-
-    def render(self, *, class_name: Optional[str] = None) -> str:
-        response = self._render(
-            class_name=class_name or self.class_name,
-            fields=self.fields,
-            decorators=self.decorators,
-            base_class=self.base_class,
-            methods=self.methods,
-            description=self.description,
-            is_functional_syntax=self.is_functional_syntax,
-            all_fields=self.all_fields,
-            **self.extra_template_data,
-        )
-        return response
-
-
-class TypedDictBackport(TypedDict):
-    BASE_CLASS: ClassVar[str] = 'typing_extensions.TypedDict'
-    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_TYPED_DICT_BACKPORT,)
-
-
-class DataModelField(DataModelFieldBase):
-    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_NOT_REQUIRED,)
-
-    @property
-    def key(self) -> str:
-        return (self.original_name or self.name or '').translate(  # pragma: no cover
-            escape_characters
-        )
-
-    @property
-    def type_hint(self) -> str:
-        type_hint = super().type_hint
-        if self._not_required:
-            return f'{NOT_REQUIRED_PREFIX}{type_hint}]'
-        return type_hint
-
-    @property
-    def _not_required(self) -> bool:
-        return not self.required and isinstance(self.parent, TypedDict)
-
-    @property
-    def fall_back_to_nullable(self) -> bool:
-        return not self._not_required
-
-    @property
-    def imports(self) -> Tuple[Import, ...]:
-        return (
-            *super().imports,
-            *(self.DEFAULT_IMPORTS if self._not_required else ()),
-        )
-
-
-class DataModelFieldBackport(DataModelField):
-    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (IMPORT_NOT_REQUIRED_BACKPORT,)
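
Editor's note: `type_hint` above wraps optional keys with `NOT_REQUIRED_PREFIX` (i.e. `NotRequired[`), and only when the field's parent is a TypedDict. An illustrative result, using the typing_extensions backport for Pythons older than 3.11:

    from typing_extensions import NotRequired, TypedDict

    class Movie(TypedDict):
        title: str
        year: NotRequired[int]  # generated for a non-required key
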
diff -pruN 0.26.4-3/datamodel_code_generator/model/types.py 0.45.0-1/datamodel_code_generator/model/types.py
--- 0.26.4-3/datamodel_code_generator/model/types.py	2024-12-15 17:25:57.705037000 +0000
+++ 0.45.0-1/datamodel_code_generator/model/types.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,85 +0,0 @@
-from typing import Any, Dict, Optional, Sequence, Type
-
-from datamodel_code_generator import DatetimeClassType, PythonVersion
-from datamodel_code_generator.imports import (
-    IMPORT_ANY,
-    IMPORT_DECIMAL,
-    IMPORT_TIMEDELTA,
-)
-from datamodel_code_generator.types import DataType, StrictTypes, Types
-from datamodel_code_generator.types import DataTypeManager as _DataTypeManager
-
-
-def type_map_factory(data_type: Type[DataType]) -> Dict[Types, DataType]:
-    data_type_int = data_type(type='int')
-    data_type_float = data_type(type='float')
-    data_type_str = data_type(type='str')
-    return {
-        # TODO: Should we support a special type such as UUID?
-        Types.integer: data_type_int,
-        Types.int32: data_type_int,
-        Types.int64: data_type_int,
-        Types.number: data_type_float,
-        Types.float: data_type_float,
-        Types.double: data_type_float,
-        Types.decimal: data_type.from_import(IMPORT_DECIMAL),
-        Types.time: data_type_str,
-        Types.string: data_type_str,
-        Types.byte: data_type_str,  # base64 encoded string
-        Types.binary: data_type(type='bytes'),
-        Types.date: data_type_str,
-        Types.date_time: data_type_str,
-        Types.timedelta: data_type.from_import(IMPORT_TIMEDELTA),
-        Types.password: data_type_str,
-        Types.email: data_type_str,
-        Types.uuid: data_type_str,
-        Types.uuid1: data_type_str,
-        Types.uuid2: data_type_str,
-        Types.uuid3: data_type_str,
-        Types.uuid4: data_type_str,
-        Types.uuid5: data_type_str,
-        Types.uri: data_type_str,
-        Types.hostname: data_type_str,
-        Types.ipv4: data_type_str,
-        Types.ipv6: data_type_str,
-        Types.ipv4_network: data_type_str,
-        Types.ipv6_network: data_type_str,
-        Types.boolean: data_type(type='bool'),
-        Types.object: data_type.from_import(IMPORT_ANY, is_dict=True),
-        Types.null: data_type(type='None'),
-        Types.array: data_type.from_import(IMPORT_ANY, is_list=True),
-        Types.any: data_type.from_import(IMPORT_ANY),
-    }
-
-
-class DataTypeManager(_DataTypeManager):
-    def __init__(
-        self,
-        python_version: PythonVersion = PythonVersion.PY_38,
-        use_standard_collections: bool = False,
-        use_generic_container_types: bool = False,
-        strict_types: Optional[Sequence[StrictTypes]] = None,
-        use_non_positive_negative_number_constrained_types: bool = False,
-        use_union_operator: bool = False,
-        use_pendulum: bool = False,
-        target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
-    ):
-        super().__init__(
-            python_version,
-            use_standard_collections,
-            use_generic_container_types,
-            strict_types,
-            use_non_positive_negative_number_constrained_types,
-            use_union_operator,
-            use_pendulum,
-            target_datetime_class,
-        )
-
-        self.type_map: Dict[Types, DataType] = type_map_factory(self.data_type)
-
-    def get_data_type(
-        self,
-        types: Types,
-        **_: Any,
-    ) -> DataType:
-        return self.type_map[types]
diff -pruN 0.26.4-3/datamodel_code_generator/model/union.py 0.45.0-1/datamodel_code_generator/model/union.py
--- 0.26.4-3/datamodel_code_generator/model/union.py	2024-12-15 17:25:57.705037000 +0000
+++ 0.45.0-1/datamodel_code_generator/model/union.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,51 +0,0 @@
-from __future__ import annotations
-
-from pathlib import Path
-from typing import Any, ClassVar, DefaultDict, Dict, List, Optional, Tuple
-
-from datamodel_code_generator.imports import IMPORT_TYPE_ALIAS, IMPORT_UNION, Import
-from datamodel_code_generator.model import DataModel, DataModelFieldBase
-from datamodel_code_generator.model.base import UNDEFINED
-from datamodel_code_generator.reference import Reference
-
-
-class DataTypeUnion(DataModel):
-    TEMPLATE_FILE_PATH: ClassVar[str] = 'Union.jinja2'
-    BASE_CLASS: ClassVar[str] = ''
-    DEFAULT_IMPORTS: ClassVar[Tuple[Import, ...]] = (
-        IMPORT_TYPE_ALIAS,
-        IMPORT_UNION,
-    )
-
-    def __init__(
-        self,
-        *,
-        reference: Reference,
-        fields: List[DataModelFieldBase],
-        decorators: Optional[List[str]] = None,
-        base_classes: Optional[List[Reference]] = None,
-        custom_base_class: Optional[str] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-        methods: Optional[List[str]] = None,
-        path: Optional[Path] = None,
-        description: Optional[str] = None,
-        default: Any = UNDEFINED,
-        nullable: bool = False,
-        keyword_only: bool = False,
-    ):
-        super().__init__(
-            reference=reference,
-            fields=fields,
-            decorators=decorators,
-            base_classes=base_classes,
-            custom_base_class=custom_base_class,
-            custom_template_dir=custom_template_dir,
-            extra_template_data=extra_template_data,
-            methods=methods,
-            path=path,
-            description=description,
-            default=default,
-            nullable=nullable,
-            keyword_only=keyword_only,
-        )
diff -pruN 0.26.4-3/datamodel_code_generator/parser/__init__.py 0.45.0-1/datamodel_code_generator/parser/__init__.py
--- 0.26.4-3/datamodel_code_generator/parser/__init__.py	2024-12-15 17:25:57.705037000 +0000
+++ 0.45.0-1/datamodel_code_generator/parser/__init__.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,33 +0,0 @@
-from __future__ import annotations
-
-from enum import Enum
-from typing import Callable, Dict, Optional, TypeVar
-
-TK = TypeVar('TK')
-TV = TypeVar('TV')
-
-
-class LiteralType(Enum):
-    All = 'all'
-    One = 'one'
-
-
-class DefaultPutDict(Dict[TK, TV]):
-    def get_or_put(
-        self,
-        key: TK,
-        default: Optional[TV] = None,
-        default_factory: Optional[Callable[[TK], TV]] = None,
-    ) -> TV:
-        if key in self:
-            return self[key]
-        elif default:  # pragma: no cover
-            value = self[key] = default
-            return value
-        elif default_factory:
-            value = self[key] = default_factory(key)
-            return value
-        raise ValueError('Neither default nor default_factory was provided')  # pragma: no cover
-
-
-__all__ = ['LiteralType']
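
Editor's note: `DefaultPutDict.get_or_put` is a small memoization helper, used for the parser's remote text cache below. A usage sketch:

    cache: DefaultPutDict[str, str] = DefaultPutDict()

    value = cache.get_or_put('key', default_factory=lambda k: k.upper())
    assert value == 'KEY'
    assert cache['key'] == 'KEY'  # later lookups are served from the dict
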
diff -pruN 0.26.4-3/datamodel_code_generator/parser/base.py 0.45.0-1/datamodel_code_generator/parser/base.py
--- 0.26.4-3/datamodel_code_generator/parser/base.py	2024-12-15 17:25:57.706037000 +0000
+++ 0.45.0-1/datamodel_code_generator/parser/base.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,1501 +0,0 @@
-import re
-import sys
-from abc import ABC, abstractmethod
-from collections import OrderedDict, defaultdict
-from itertools import groupby
-from pathlib import Path
-from typing import (
-    Any,
-    Callable,
-    DefaultDict,
-    Dict,
-    Iterable,
-    Iterator,
-    List,
-    Mapping,
-    NamedTuple,
-    Optional,
-    Sequence,
-    Set,
-    Tuple,
-    Type,
-    TypeVar,
-    Union,
-)
-from urllib.parse import ParseResult
-
-from pydantic import BaseModel
-
-from datamodel_code_generator.format import (
-    CodeFormatter,
-    DatetimeClassType,
-    PythonVersion,
-)
-from datamodel_code_generator.imports import (
-    IMPORT_ANNOTATIONS,
-    IMPORT_LITERAL,
-    IMPORT_LITERAL_BACKPORT,
-    Import,
-    Imports,
-)
-from datamodel_code_generator.model import dataclass as dataclass_model
-from datamodel_code_generator.model import msgspec as msgspec_model
-from datamodel_code_generator.model import pydantic as pydantic_model
-from datamodel_code_generator.model import pydantic_v2 as pydantic_model_v2
-from datamodel_code_generator.model.base import (
-    ALL_MODEL,
-    UNDEFINED,
-    BaseClassDataType,
-    ConstraintsBase,
-    DataModel,
-    DataModelFieldBase,
-)
-from datamodel_code_generator.model.enum import Enum, Member
-from datamodel_code_generator.parser import DefaultPutDict, LiteralType
-from datamodel_code_generator.reference import ModelResolver, Reference
-from datamodel_code_generator.types import DataType, DataTypeManager, StrictTypes
-from datamodel_code_generator.util import Protocol, runtime_checkable
-
-SPECIAL_PATH_FORMAT: str = '#-datamodel-code-generator-#-{}-#-special-#'
-
-
-def get_special_path(keyword: str, path: List[str]) -> List[str]:
-    return [*path, SPECIAL_PATH_FORMAT.format(keyword)]
-
-
-escape_characters = str.maketrans(
-    {
-        '\\': r'\\',
-        "'": r'\'',
-        '\b': r'\b',
-        '\f': r'\f',
-        '\n': r'\n',
-        '\r': r'\r',
-        '\t': r'\t',
-    }
-)
-
-
-def to_hashable(item: Any) -> Any:
-    if isinstance(
-        item,
-        (
-            list,
-            tuple,
-        ),
-    ):
-        return tuple(sorted(to_hashable(i) for i in item))
-    elif isinstance(item, dict):
-        return tuple(
-            sorted(
-                (
-                    k,
-                    to_hashable(v),
-                )
-                for k, v in item.items()
-            )
-        )
-    elif isinstance(item, set):  # pragma: no cover
-        return frozenset(to_hashable(i) for i in item)
-    elif isinstance(item, BaseModel):
-        return to_hashable(item.dict())
-    return item
-
-
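Editor's note: `to_hashable` normalizes nested JSON-ish values into sorted tuples so that structurally equal schema fragments compare (and hash) equal. Quick sanity checks:

    assert to_hashable([3, 1, 2]) == (1, 2, 3)
    assert to_hashable({'b': [2, 1], 'a': 0}) == (('a', 0), ('b', (1, 2)))
    assert to_hashable({'a': 0}) == to_hashable({'a': 0})
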
-def dump_templates(templates: List[DataModel]) -> str:
-    return '\n\n\n'.join(str(m) for m in templates)
-
-
-ReferenceMapSet = Dict[str, Set[str]]
-SortedDataModels = Dict[str, DataModel]
-
-MAX_RECURSION_COUNT: int = sys.getrecursionlimit()
-
-
-def sort_data_models(
-    unsorted_data_models: List[DataModel],
-    sorted_data_models: Optional[SortedDataModels] = None,
-    require_update_action_models: Optional[List[str]] = None,
-    recursion_count: int = MAX_RECURSION_COUNT,
-) -> Tuple[List[DataModel], SortedDataModels, List[str]]:
-    if sorted_data_models is None:
-        sorted_data_models = OrderedDict()
-    if require_update_action_models is None:
-        require_update_action_models = []
-    sorted_model_count: int = len(sorted_data_models)
-
-    unresolved_references: List[DataModel] = []
-    for model in unsorted_data_models:
-        if not model.reference_classes:
-            sorted_data_models[model.path] = model
-        elif (
-            model.path in model.reference_classes and len(model.reference_classes) == 1
-        ):  # only self-referencing
-            sorted_data_models[model.path] = model
-            require_update_action_models.append(model.path)
-        elif (
-            not model.reference_classes - {model.path} - set(sorted_data_models)
-        ):  # reference classes have been resolved
-            sorted_data_models[model.path] = model
-            if model.path in model.reference_classes:
-                require_update_action_models.append(model.path)
-        else:
-            unresolved_references.append(model)
-    if unresolved_references:
-        if sorted_model_count != len(sorted_data_models) and recursion_count:
-            try:
-                return sort_data_models(
-                    unresolved_references,
-                    sorted_data_models,
-                    require_update_action_models,
-                    recursion_count - 1,
-                )
-            except RecursionError:  # pragma: no cover
-                pass
-
-        # sort on base_class dependency
-        while True:
-            ordered_models: List[Tuple[int, DataModel]] = []
-            unresolved_reference_model_names = [m.path for m in unresolved_references]
-            for model in unresolved_references:
-                indexes = [
-                    unresolved_reference_model_names.index(b.reference.path)
-                    for b in model.base_classes
-                    if b.reference
-                    and b.reference.path in unresolved_reference_model_names
-                ]
-                if indexes:
-                    ordered_models.append(
-                        (
-                            max(indexes),
-                            model,
-                        )
-                    )
-                else:
-                    ordered_models.append(
-                        (
-                            -1,
-                            model,
-                        )
-                    )
-            sorted_unresolved_models = [
-                m[1] for m in sorted(ordered_models, key=lambda m: m[0])
-            ]
-            if sorted_unresolved_models == unresolved_references:
-                break
-            unresolved_references = sorted_unresolved_models
-
-        # circular reference
-        unsorted_data_model_names = set(unresolved_reference_model_names)
-        for model in unresolved_references:
-            unresolved_model = (
-                model.reference_classes - {model.path} - set(sorted_data_models)
-            )
-            base_models = [
-                getattr(s.reference, 'path', None) for s in model.base_classes
-            ]
-            update_action_parent = set(require_update_action_models).intersection(
-                base_models
-            )
-            if not unresolved_model:
-                sorted_data_models[model.path] = model
-                if update_action_parent:
-                    require_update_action_models.append(model.path)
-                continue
-            if not unresolved_model - unsorted_data_model_names:
-                sorted_data_models[model.path] = model
-                require_update_action_models.append(model.path)
-                continue
-            # unresolved
-            unresolved_classes = ', '.join(
-                f'[class: {item.path} references: {item.reference_classes}]'
-                for item in unresolved_references
-            )
-            raise Exception(f'A Parser cannot resolve classes: {unresolved_classes}.')
-    return unresolved_references, sorted_data_models, require_update_action_models
-
-
-def relative(current_module: str, reference: str) -> Tuple[str, str]:
-    """Find relative module path."""
-
-    current_module_path = current_module.split('.') if current_module else []
-    *reference_path, name = reference.split('.')
-
-    if current_module_path == reference_path:
-        return '', ''
-
-    i = 0
-    for x, y in zip(current_module_path, reference_path):
-        if x != y:
-            break
-        i += 1
-
-    left = '.' * (len(current_module_path) - i)
-    right = '.'.join(reference_path[i:])
-
-    if not left:
-        left = '.'
-    if not right:
-        right = name
-    elif '.' in right:
-        extra, right = right.rsplit('.', 1)
-        left += extra
-
-    return left, right
-
-
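Editor's note: `relative` computes the pieces of a `from X import Y` statement for a reference as seen from the current module. A few worked cases:

    assert relative('a.b', 'a.b.Model') == ('', '')        # same module: no import
    assert relative('a.b.c', 'a.b.d.Model') == ('.', 'd')  # sibling module
    assert relative('a.b.c', 'a.x.y.Model') == ('..x', 'y')
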
-def exact_import(from_: str, import_: str, short_name: str) -> Tuple[str, str]:
-    if from_ == len(from_) * '.':
-        # Prevents "from . import foo" becoming "from ..foo import Foo"
-        # or "from .. import foo" becoming "from ...foo import Foo"
-        # when our imported module has the same parent
-        return f'{from_}{import_}', short_name
-    return f'{from_}.{import_}', short_name
-
-
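Editor's note: `exact_import` joins the pieces from `relative` into a concrete from-path, with the special case described in the comment for purely relative parents:

    assert exact_import('.', 'foo', 'Foo') == ('.foo', 'Foo')  # not '..foo'
    assert exact_import('..', 'foo', 'Foo') == ('..foo', 'Foo')
    assert exact_import('pkg', 'foo', 'Foo') == ('pkg.foo', 'Foo')
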
-@runtime_checkable
-class Child(Protocol):
-    @property
-    def parent(self) -> Optional[Any]:
-        raise NotImplementedError
-
-
-T = TypeVar('T')
-
-
-def get_most_of_parent(value: Any, type_: Optional[Type[T]] = None) -> Optional[T]:
-    if isinstance(value, Child) and (type_ is None or not isinstance(value, type_)):
-        return get_most_of_parent(value.parent, type_)
-    return value
-
-
-def title_to_class_name(title: str) -> str:
-    classname = re.sub('[^A-Za-z0-9]+', ' ', title)
-    classname = ''.join(x for x in classname.title() if not x.isspace())
-    return classname
-
-
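Editor's note: `title_to_class_name` is the default class-name generator: it collapses non-alphanumerics to spaces, title-cases the result, and strips the spaces. For example:

    assert title_to_class_name('my cool-title 2') == 'MyCoolTitle2'
    assert title_to_class_name('user_id') == 'UserId'
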
-def _find_base_classes(model: DataModel) -> List[DataModel]:
-    return [
-        b.reference.source
-        for b in model.base_classes
-        if b.reference and isinstance(b.reference.source, DataModel)
-    ]
-
-
-def _find_field(
-    original_name: str, models: List[DataModel]
-) -> Optional[DataModelFieldBase]:
-    def _find_field_and_base_classes(
-        model_: DataModel,
-    ) -> Tuple[Optional[DataModelFieldBase], List[DataModel]]:
-        for field_ in model_.fields:
-            if field_.original_name == original_name:
-                return field_, []
-        return None, _find_base_classes(model_)  # pragma: no cover
-
-    for model in models:
-        field, base_models = _find_field_and_base_classes(model)
-        if field:
-            return field
-        models.extend(base_models)  # pragma: no cover
-
-    return None  # pragma: no cover
-
-
-def _copy_data_types(data_types: List[DataType]) -> List[DataType]:
-    copied_data_types: List[DataType] = []
-    for data_type_ in data_types:
-        if data_type_.reference:
-            copied_data_types.append(
-                data_type_.__class__(reference=data_type_.reference)
-            )
-        elif data_type_.data_types:  # pragma: no cover
-            copied_data_type = data_type_.copy()
-            copied_data_type.data_types = _copy_data_types(data_type_.data_types)
-            copied_data_types.append(copied_data_type)
-        else:
-            copied_data_types.append(data_type_.copy())
-    return copied_data_types
-
-
-class Result(BaseModel):
-    body: str
-    source: Optional[Path] = None
-
-
-class Source(BaseModel):
-    path: Path
-    text: str
-
-    @classmethod
-    def from_path(cls, path: Path, base_path: Path, encoding: str) -> 'Source':
-        return cls(
-            path=path.relative_to(base_path),
-            text=path.read_text(encoding=encoding),
-        )
-
-
-class Parser(ABC):
-    def __init__(
-        self,
-        source: Union[str, Path, List[Path], ParseResult],
-        *,
-        data_model_type: Type[DataModel] = pydantic_model.BaseModel,
-        data_model_root_type: Type[DataModel] = pydantic_model.CustomRootType,
-        data_type_manager_type: Type[DataTypeManager] = pydantic_model.DataTypeManager,
-        data_model_field_type: Type[DataModelFieldBase] = pydantic_model.DataModelField,
-        base_class: Optional[str] = None,
-        additional_imports: Optional[List[str]] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-        target_python_version: PythonVersion = PythonVersion.PY_38,
-        dump_resolve_reference_action: Optional[Callable[[Iterable[str]], str]] = None,
-        validation: bool = False,
-        field_constraints: bool = False,
-        snake_case_field: bool = False,
-        strip_default_none: bool = False,
-        aliases: Optional[Mapping[str, str]] = None,
-        allow_population_by_field_name: bool = False,
-        apply_default_values_for_required_fields: bool = False,
-        allow_extra_fields: bool = False,
-        force_optional_for_required_fields: bool = False,
-        class_name: Optional[str] = None,
-        use_standard_collections: bool = False,
-        base_path: Optional[Path] = None,
-        use_schema_description: bool = False,
-        use_field_description: bool = False,
-        use_default_kwarg: bool = False,
-        reuse_model: bool = False,
-        encoding: str = 'utf-8',
-        enum_field_as_literal: Optional[LiteralType] = None,
-        set_default_enum_member: bool = False,
-        use_subclass_enum: bool = False,
-        strict_nullable: bool = False,
-        use_generic_container_types: bool = False,
-        enable_faux_immutability: bool = False,
-        remote_text_cache: Optional[DefaultPutDict[str, str]] = None,
-        disable_appending_item_suffix: bool = False,
-        strict_types: Optional[Sequence[StrictTypes]] = None,
-        empty_enum_field_name: Optional[str] = None,
-        custom_class_name_generator: Optional[
-            Callable[[str], str]
-        ] = title_to_class_name,
-        field_extra_keys: Optional[Set[str]] = None,
-        field_include_all_keys: bool = False,
-        field_extra_keys_without_x_prefix: Optional[Set[str]] = None,
-        wrap_string_literal: Optional[bool] = None,
-        use_title_as_name: bool = False,
-        use_operation_id_as_name: bool = False,
-        use_unique_items_as_set: bool = False,
-        http_headers: Optional[Sequence[Tuple[str, str]]] = None,
-        http_ignore_tls: bool = False,
-        use_annotated: bool = False,
-        use_non_positive_negative_number_constrained_types: bool = False,
-        original_field_name_delimiter: Optional[str] = None,
-        use_double_quotes: bool = False,
-        use_union_operator: bool = False,
-        allow_responses_without_content: bool = False,
-        collapse_root_models: bool = False,
-        special_field_name_prefix: Optional[str] = None,
-        remove_special_field_name_prefix: bool = False,
-        capitalise_enum_members: bool = False,
-        keep_model_order: bool = False,
-        use_one_literal_as_default: bool = False,
-        known_third_party: Optional[List[str]] = None,
-        custom_formatters: Optional[List[str]] = None,
-        custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
-        use_pendulum: bool = False,
-        http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
-        treat_dots_as_module: bool = False,
-        use_exact_imports: bool = False,
-        default_field_extras: Optional[Dict[str, Any]] = None,
-        target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
-        keyword_only: bool = False,
-        no_alias: bool = False,
-    ) -> None:
-        self.keyword_only = keyword_only
-        self.data_type_manager: DataTypeManager = data_type_manager_type(
-            python_version=target_python_version,
-            use_standard_collections=use_standard_collections,
-            use_generic_container_types=use_generic_container_types,
-            strict_types=strict_types,
-            use_union_operator=use_union_operator,
-            use_pendulum=use_pendulum,
-            target_datetime_class=target_datetime_class,
-        )
-        self.data_model_type: Type[DataModel] = data_model_type
-        self.data_model_root_type: Type[DataModel] = data_model_root_type
-        self.data_model_field_type: Type[DataModelFieldBase] = data_model_field_type
-
-        self.imports: Imports = Imports(use_exact_imports)
-        self.use_exact_imports: bool = use_exact_imports
-        self._append_additional_imports(additional_imports=additional_imports)
-
-        self.base_class: Optional[str] = base_class
-        self.target_python_version: PythonVersion = target_python_version
-        self.results: List[DataModel] = []
-        self.dump_resolve_reference_action: Optional[Callable[[Iterable[str]], str]] = (
-            dump_resolve_reference_action
-        )
-        self.validation: bool = validation
-        self.field_constraints: bool = field_constraints
-        self.snake_case_field: bool = snake_case_field
-        self.strip_default_none: bool = strip_default_none
-        self.apply_default_values_for_required_fields: bool = (
-            apply_default_values_for_required_fields
-        )
-        self.force_optional_for_required_fields: bool = (
-            force_optional_for_required_fields
-        )
-        self.use_schema_description: bool = use_schema_description
-        self.use_field_description: bool = use_field_description
-        self.use_default_kwarg: bool = use_default_kwarg
-        self.reuse_model: bool = reuse_model
-        self.encoding: str = encoding
-        self.enum_field_as_literal: Optional[LiteralType] = enum_field_as_literal
-        self.set_default_enum_member: bool = set_default_enum_member
-        self.use_subclass_enum: bool = use_subclass_enum
-        self.strict_nullable: bool = strict_nullable
-        self.use_generic_container_types: bool = use_generic_container_types
-        self.use_union_operator: bool = use_union_operator
-        self.enable_faux_immutability: bool = enable_faux_immutability
-        self.custom_class_name_generator: Optional[Callable[[str], str]] = (
-            custom_class_name_generator
-        )
-        self.field_extra_keys: Set[str] = field_extra_keys or set()
-        self.field_extra_keys_without_x_prefix: Set[str] = (
-            field_extra_keys_without_x_prefix or set()
-        )
-        self.field_include_all_keys: bool = field_include_all_keys
-
-        self.remote_text_cache: DefaultPutDict[str, str] = (
-            remote_text_cache or DefaultPutDict()
-        )
-        self.current_source_path: Optional[Path] = None
-        self.use_title_as_name: bool = use_title_as_name
-        self.use_operation_id_as_name: bool = use_operation_id_as_name
-        self.use_unique_items_as_set: bool = use_unique_items_as_set
-
-        if base_path:
-            self.base_path = base_path
-        elif isinstance(source, Path):
-            self.base_path = (
-                source.absolute() if source.is_dir() else source.absolute().parent
-            )
-        else:
-            self.base_path = Path.cwd()
-
-        self.source: Union[str, Path, List[Path], ParseResult] = source
-        self.custom_template_dir = custom_template_dir
-        self.extra_template_data: DefaultDict[str, Any] = (
-            extra_template_data or defaultdict(dict)
-        )
-
-        if allow_population_by_field_name:
-            self.extra_template_data[ALL_MODEL]['allow_population_by_field_name'] = True
-
-        if allow_extra_fields:
-            self.extra_template_data[ALL_MODEL]['allow_extra_fields'] = True
-
-        if enable_faux_immutability:
-            self.extra_template_data[ALL_MODEL]['allow_mutation'] = False
-
-        self.model_resolver = ModelResolver(
-            base_url=source.geturl() if isinstance(source, ParseResult) else None,
-            singular_name_suffix='' if disable_appending_item_suffix else None,
-            aliases=aliases,
-            empty_field_name=empty_enum_field_name,
-            snake_case_field=snake_case_field,
-            custom_class_name_generator=custom_class_name_generator,
-            base_path=self.base_path,
-            original_field_name_delimiter=original_field_name_delimiter,
-            special_field_name_prefix=special_field_name_prefix,
-            remove_special_field_name_prefix=remove_special_field_name_prefix,
-            capitalise_enum_members=capitalise_enum_members,
-            no_alias=no_alias,
-        )
-        self.class_name: Optional[str] = class_name
-        self.wrap_string_literal: Optional[bool] = wrap_string_literal
-        self.http_headers: Optional[Sequence[Tuple[str, str]]] = http_headers
-        self.http_query_parameters: Optional[Sequence[Tuple[str, str]]] = (
-            http_query_parameters
-        )
-        self.http_ignore_tls: bool = http_ignore_tls
-        self.use_annotated: bool = use_annotated
-        if self.use_annotated and not self.field_constraints:  # pragma: no cover
-            raise Exception(
-                '`use_annotated=True` has to be used with `field_constraints=True`'
-            )
-        self.use_non_positive_negative_number_constrained_types = (
-            use_non_positive_negative_number_constrained_types
-        )
-        self.use_double_quotes = use_double_quotes
-        self.allow_responses_without_content = allow_responses_without_content
-        self.collapse_root_models = collapse_root_models
-        self.capitalise_enum_members = capitalise_enum_members
-        self.keep_model_order = keep_model_order
-        self.use_one_literal_as_default = use_one_literal_as_default
-        self.known_third_party = known_third_party
-        self.custom_formatter = custom_formatters
-        self.custom_formatters_kwargs = custom_formatters_kwargs
-        self.treat_dots_as_module = treat_dots_as_module
-        self.default_field_extras: Optional[Dict[str, Any]] = default_field_extras
-
-    @property
-    def iter_source(self) -> Iterator[Source]:
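-        # Sources may be raw text, a single file, a directory tree,
-        # a list of paths, or a remote URL (fetched through the cache).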
-        if isinstance(self.source, str):
-            yield Source(path=Path(), text=self.source)
-        elif isinstance(self.source, Path):  # pragma: no cover
-            if self.source.is_dir():
-                for path in sorted(self.source.rglob('*'), key=lambda p: p.name):
-                    if path.is_file():
-                        yield Source.from_path(path, self.base_path, self.encoding)
-            else:
-                yield Source.from_path(self.source, self.base_path, self.encoding)
-        elif isinstance(self.source, list):  # pragma: no cover
-            for path in self.source:
-                yield Source.from_path(path, self.base_path, self.encoding)
-        else:
-            yield Source(
-                path=Path(self.source.path),
-                text=self.remote_text_cache.get_or_put(
-                    self.source.geturl(), default_factory=self._get_text_from_url
-                ),
-            )
-
-    def _append_additional_imports(
-        self, additional_imports: Optional[List[str]]
-    ) -> None:
-        if additional_imports is None:
-            additional_imports = []
-
-        for additional_import_string in additional_imports:
-            if additional_import_string is None:
-                continue
-            new_import = Import.from_full_path(additional_import_string)
-            self.imports.append(new_import)
-
-    def _get_text_from_url(self, url: str) -> str:
-        from datamodel_code_generator.http import get_body
-
-        return self.remote_text_cache.get_or_put(
-            url,
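-            # the factory ignores its `url_` argument and closes over `url` above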
-            default_factory=lambda url_: get_body(
-                url, self.http_headers, self.http_ignore_tls, self.http_query_parameters
-            ),
-        )
-
-    @classmethod
-    def get_url_path_parts(cls, url: ParseResult) -> List[str]:
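-        # e.g. https://example.com/a/b.json -> ['https://example.com', 'a', 'b.json']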
-        return [
-            f'{url.scheme}://{url.hostname}',
-            *url.path.split('/')[1:],
-        ]
-
-    @property
-    def data_type(self) -> Type[DataType]:
-        return self.data_type_manager.data_type
-
-    @abstractmethod
-    def parse_raw(self) -> None:
-        raise NotImplementedError
-
-    def __delete_duplicate_models(self, models: List[DataModel]) -> None:
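-        # Two-step de-duplication: drop root models that merely re-reference
-        # another model, then merge models whose rendered code and imports
-        # are identical.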
-        model_class_names: Dict[str, DataModel] = {}
-        model_to_duplicate_models: DefaultDict[DataModel, List[DataModel]] = (
-            defaultdict(list)
-        )
-        for model in models[:]:
-            if isinstance(model, self.data_model_root_type):
-                root_data_type = model.fields[0].data_type
-
-                # Backward compatibility: remove a duplicated root model that is
-                # just a bare reference to another model with the same name
-                if (
-                    root_data_type.reference
-                    and not root_data_type.is_dict
-                    and not root_data_type.is_list
-                    and root_data_type.reference.source in models
-                    and root_data_type.reference.name
-                    == self.model_resolver.get_class_name(
-                        model.reference.original_name, unique=False
-                    ).name
-                ):
-                    # Redirect children referencing the duplicate model to the original model
-                    for child in model.reference.children[:]:
-                        child.replace_reference(root_data_type.reference)
-                    models.remove(model)
-                    for data_type in model.all_data_types:
-                        if data_type.reference:
-                            data_type.remove_reference()
-                    continue
-
-                # A custom root model can't be used as a base class (a Pydantic
-                # restriction), so drop it from inheriting models' base classes
-                for child in model.reference.children:
-                    # only DataModel children represent inheritance
-                    if isinstance(child, DataModel):
-                        for base_class in child.base_classes[:]:
-                            if base_class.reference == model.reference:
-                                child.base_classes.remove(base_class)
-                        if not child.base_classes:  # pragma: no cover
-                            child.set_base_class()
-
-            class_name = model.duplicate_class_name or model.class_name
-            if class_name in model_class_names:
-                model_key = tuple(
-                    to_hashable(v)
-                    for v in (
-                        model.render(class_name=model.duplicate_class_name),
-                        model.imports,
-                    )
-                )
-                original_model = model_class_names[class_name]
-                original_model_key = tuple(
-                    to_hashable(v)
-                    for v in (
-                        original_model.render(
-                            class_name=original_model.duplicate_class_name
-                        ),
-                        original_model.imports,
-                    )
-                )
-                if model_key == original_model_key:
-                    model_to_duplicate_models[original_model].append(model)
-                    continue
-            model_class_names[class_name] = model
-        for model, duplicate_models in model_to_duplicate_models.items():
-            for duplicate_model in duplicate_models:
-                for child in duplicate_model.reference.children[:]:
-                    child.replace_reference(model.reference)
-                models.remove(duplicate_model)
-
-    @classmethod
-    def __replace_duplicate_name_in_module(cls, models: List[DataModel]) -> None:
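-        # Rename models that collide with imported names or with each other,
-        # appending a 'Model' suffix where needed.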
-        scoped_model_resolver = ModelResolver(
-            exclude_names={i.alias or i.import_ for m in models for i in m.imports},
-            duplicate_name_suffix='Model',
-        )
-
-        model_names: Dict[str, DataModel] = {}
-        for model in models:
-            class_name: str = model.class_name
-            generated_name: str = scoped_model_resolver.add(
-                [model.path], class_name, unique=True, class_name=True
-            ).name
-            if class_name != generated_name:
-                model.class_name = generated_name
-            model_names[model.class_name] = model
-
-        for model in models:
-            duplicate_name = model.duplicate_class_name
-            # restore the originally desired name if no other model has claimed it
-            if duplicate_name and duplicate_name not in model_names:
-                del model_names[model.class_name]
-                model.class_name = duplicate_name
-                model_names[duplicate_name] = model
-
-    def __change_from_import(
-        self,
-        models: List[DataModel],
-        imports: Imports,
-        scoped_model_resolver: ModelResolver,
-        init: bool,
-    ) -> None:
-        for model in models:
-            scoped_model_resolver.add([model.path], model.class_name)
-        for model in models:
-            before_import = model.imports
-            imports.append(before_import)
-            for data_type in model.all_data_types:
-                # rewrite the reference as a relative from/import
-
-                if not data_type.reference or data_type.reference.source in models:
-                    # Nothing to import: either the data type is not a reference,
-                    # or the referenced model lives in the same module.
-                    continue
-
-                if isinstance(data_type, BaseClassDataType):
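-                    # import the base class by name so it can be used directly
-                    # in the class definition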
-                    left, right = relative(model.module_name, data_type.full_name)
-                    from_ = (
-                        ''.join([left, right])
-                        if left.endswith('.')
-                        else '.'.join([left, right])
-                    )
-                    import_ = data_type.reference.short_name
-                    full_path = from_, import_
-                else:
-                    from_, import_ = full_path = relative(
-                        model.module_name, data_type.full_name
-                    )
-                    if imports.use_exact:  # pragma: no cover
-                        from_, import_ = exact_import(
-                            from_, import_, data_type.reference.short_name
-                        )
-                    import_ = import_.replace('-', '_')
-                    if (
-                        len(model.module_path) > 1
-                        and model.module_path[-1].count('.') > 0
-                        and not self.treat_dots_as_module
-                    ):
-                        rel_path_depth = model.module_path[-1].count('.')
-                        from_ = from_[rel_path_depth:]
-
-                alias = scoped_model_resolver.add(full_path, import_).name
-
-                name = data_type.reference.short_name
-                if from_ and import_ and alias != name:
-                    data_type.alias = (
-                        alias
-                        if data_type.reference.short_name == import_
-                        else f'{alias}.{name}'
-                    )
-
-                if init:
-                    from_ = '.' + from_
-                imports.append(
-                    Import(
-                        from_=from_,
-                        import_=import_,
-                        alias=alias,
-                        reference_path=data_type.reference.path,
-                    ),
-                )
-            after_import = model.imports
-            if before_import != after_import:
-                imports.append(after_import)
-
-    @classmethod
-    def __extract_inherited_enum(cls, models: List[DataModel]) -> None:
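-        # A field-less model whose base classes are Enums is replaced by a
-        # merged Enum, since Enum members cannot be inherited.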
-        for model in models[:]:
-            if model.fields:
-                continue
-            enums: List[Enum] = []
-            for base_model in model.base_classes:
-                if not base_model.reference:
-                    continue
-                source_model = base_model.reference.source
-                if isinstance(source_model, Enum):
-                    enums.append(source_model)
-            if enums:
-                models.insert(
-                    models.index(model),
-                    enums[0].__class__(
-                        fields=[f for e in enums for f in e.fields],
-                        description=model.description,
-                        reference=model.reference,
-                    ),
-                )
-                models.remove(model)
-
-    def __apply_discriminator_type(
-        self,
-        models: List[DataModel],
-        imports: Imports,
-    ) -> None:
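-        # Rewrite each discriminator property as a required Literal field whose
-        # values come from the discriminator mapping (or, absent a mapping,
-        # the model path).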
-        for model in models:
-            for field in model.fields:
-                discriminator = field.extras.get('discriminator')
-                if not discriminator or not isinstance(discriminator, dict):
-                    continue
-                property_name = discriminator.get('propertyName')
-                if not property_name:  # pragma: no cover
-                    continue
-                mapping = discriminator.get('mapping', {})
-                for data_type in field.data_type.data_types:
-                    if not data_type.reference:  # pragma: no cover
-                        continue
-                    discriminator_model = data_type.reference.source
-
-                    if not isinstance(  # pragma: no cover
-                        discriminator_model,
-                        (
-                            pydantic_model.BaseModel,
-                            pydantic_model_v2.BaseModel,
-                            dataclass_model.DataClass,
-                            msgspec_model.Struct,
-                        ),
-                    ):
-                        continue  # pragma: no cover
-
-                    type_names: List[str] = []
-
-                    def check_paths(
-                        model: Union[
-                            pydantic_model.BaseModel,
-                            pydantic_model_v2.BaseModel,
-                            Reference,
-                        ],
-                        mapping: Dict[str, str],
-                        type_names: List[str] = type_names,
-                    ) -> None:
-                        """Helper function to validate paths for a given model."""
-                        for name, path in mapping.items():
-                            if (
-                                model.path.split('#/')[-1] != path.split('#/')[-1]
-                            ) and (
-                                path.startswith('#/')
-                                or model.path[:-1] != path.split('/')[-1]
-                            ):
-                                t_path = path[str(path).find('/') + 1 :]
-                                t_disc = model.path[: str(model.path).find('#')].lstrip(
-                                    '../'
-                                )
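-                                # NOTE: lstrip('../') strips any leading '.' and '/'
-                                # characters, not the literal '../' prefix.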
-                                t_disc_2 = '/'.join(t_disc.split('/')[1:])
-                                if t_path != t_disc and t_path != t_disc_2:
-                                    continue
-                            type_names.append(name)
-
-                    # Check the main discriminator model path
-                    if mapping:
-                        check_paths(discriminator_model, mapping)
-
-                        # Check the base_classes if they exist
-                        if len(type_names) == 0:
-                            for base_class in discriminator_model.base_classes:
-                                check_paths(base_class.reference, mapping)
-                    else:
-                        type_names = [discriminator_model.path.split('/')[-1]]
-                    if not type_names:  # pragma: no cover
-                        raise RuntimeError(
-                            f'Discriminator type is not found. {data_type.reference.path}'
-                        )
-                    has_one_literal = False
-                    for discriminator_field in discriminator_model.fields:
-                        if (
-                            discriminator_field.original_name
-                            or discriminator_field.name
-                        ) != property_name:
-                            continue
-                        literals = discriminator_field.data_type.literals
-                        if len(literals) == 1 and literals[0] == (
-                            type_names[0] if type_names else None
-                        ):
-                            has_one_literal = True
-                            if isinstance(
-                                discriminator_model, msgspec_model.Struct
-                            ):  # pragma: no cover
-                                discriminator_model.add_base_class_kwarg(
-                                    'tag_field', f"'{property_name}'"
-                                )
-                                discriminator_model.add_base_class_kwarg(
-                                    'tag', discriminator_field.represented_default
-                                )
-                                discriminator_field.extras['is_classvar'] = True
-                            # Found the discriminator field, no need to keep looking
-                            break
-                        for (
-                            field_data_type
-                        ) in discriminator_field.data_type.all_data_types:
-                            if field_data_type.reference:  # pragma: no cover
-                                field_data_type.remove_reference()
-                        discriminator_field.data_type = self.data_type(
-                            literals=type_names
-                        )
-                        discriminator_field.data_type.parent = discriminator_field
-                        discriminator_field.required = True
-                        imports.append(discriminator_field.imports)
-                        has_one_literal = True
-                    if not has_one_literal:
-                        discriminator_model.fields.append(
-                            self.data_model_field_type(
-                                name=property_name,
-                                data_type=self.data_type(literals=type_names),
-                                required=True,
-                            )
-                        )
-                    literal = (
-                        IMPORT_LITERAL
-                        if self.target_python_version.has_literal_type
-                        else IMPORT_LITERAL_BACKPORT
-                    )
-                    has_imported_literal = any(
-                        literal == import_  # type: ignore [comparison-overlap]
-                        for import_ in imports
-                    )
-                    if has_imported_literal:  # pragma: no cover
-                        imports.append(literal)
-
-    @classmethod
-    def _create_set_from_list(cls, data_type: DataType) -> Optional[DataType]:
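-        # Recursively convert list types to set types (for uniqueItems).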
-        if data_type.is_list:
-            new_data_type = data_type.copy()
-            new_data_type.is_list = False
-            new_data_type.is_set = True
-            for data_type_ in new_data_type.data_types:
-                data_type_.parent = new_data_type
-            return new_data_type
-        elif data_type.data_types:  # pragma: no cover
-            for index, nested_data_type in enumerate(data_type.data_types[:]):
-                set_data_type = cls._create_set_from_list(nested_data_type)
-                if set_data_type:  # pragma: no cover
-                    data_type.data_types[index] = set_data_type
-            return data_type
-        return None  # pragma: no cover
-
-    def __replace_unique_list_to_set(self, models: List[DataModel]) -> None:
-        for model in models:
-            for model_field in model.fields:
-                if not self.use_unique_items_as_set:
-                    continue
-
-                if not (
-                    model_field.constraints and model_field.constraints.unique_items
-                ):
-                    continue
-                set_data_type = self._create_set_from_list(model_field.data_type)
-                if set_data_type:  # pragma: no cover
-                    model_field.data_type.parent = None
-                    model_field.data_type = set_data_type
-                    set_data_type.parent = model_field
-
-    @classmethod
-    def __set_reference_default_value_to_field(cls, models: List[DataModel]) -> None:
-        for model in models:
-            for model_field in model.fields:
-                if not model_field.data_type.reference or model_field.has_default:
-                    continue
-                if isinstance(
-                    model_field.data_type.reference.source, DataModel
-                ):  # pragma: no cover
-                    if model_field.data_type.reference.source.default != UNDEFINED:
-                        model_field.default = (
-                            model_field.data_type.reference.source.default
-                        )
-
-    def __reuse_model(
-        self, models: List[DataModel], require_update_action_models: List[str]
-    ) -> None:
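-        # Models that render identically are collapsed: duplicate enums are
-        # dropped in favour of the cached one, other duplicates become empty
-        # subclasses of it.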
-        if not self.reuse_model:
-            return None
-        model_cache: Dict[Tuple[str, ...], Reference] = {}
-        duplicates = []
-        for model in models[:]:
-            model_key = tuple(
-                to_hashable(v) for v in (model.render(class_name='M'), model.imports)
-            )
-            cached_model_reference = model_cache.get(model_key)
-            if cached_model_reference:
-                if isinstance(model, Enum):
-                    for child in model.reference.children[:]:
-                        # each child is a data type resolved through the reference
-                        data_model = get_most_of_parent(child)
-                        # TODO: replace reference in all modules
-                        if data_model in models:  # pragma: no cover
-                            child.replace_reference(cached_model_reference)
-                    duplicates.append(model)
-                else:
-                    index = models.index(model)
-                    inherited_model = model.__class__(
-                        fields=[],
-                        base_classes=[cached_model_reference],
-                        description=model.description,
-                        reference=Reference(
-                            name=model.name,
-                            path=model.reference.path + '/reuse',
-                        ),
-                        custom_template_dir=model._custom_template_dir,
-                    )
-                    if cached_model_reference.path in require_update_action_models:
-                        require_update_action_models.append(inherited_model.path)
-                    models.insert(index, inherited_model)
-                    models.remove(model)
-
-            else:
-                model_cache[model_key] = model.reference
-
-        for duplicate in duplicates:
-            models.remove(duplicate)
-
-    def __collapse_root_models(
-        self,
-        models: List[DataModel],
-        unused_models: List[DataModel],
-        imports: Imports,
-        scoped_model_resolver: ModelResolver,
-    ) -> None:
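-        # Inline each root model's inner type at its usage sites, then drop the
-        # root model once nothing references it any more.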
-        if not self.collapse_root_models:
-            return None
-
-        for model in models:
-            for model_field in model.fields:
-                for data_type in model_field.data_type.all_data_types:
-                    reference = data_type.reference
-                    if not reference or not isinstance(
-                        reference.source, self.data_model_root_type
-                    ):
-                        continue
-
-                    # Use root-type as model_field type
-                    root_type_model = reference.source
-                    root_type_field = root_type_model.fields[0]
-
-                    if (
-                        self.field_constraints
-                        and isinstance(root_type_field.constraints, ConstraintsBase)
-                        and root_type_field.constraints.has_constraints
-                        and any(
-                            d
-                            for d in model_field.data_type.all_data_types
-                            if d.is_dict or d.is_union
-                        )
-                    ):
-                        continue  # pragma: no cover
-
-                    # work on a copy of the root-type field's data type
-                    copied_data_type = root_type_field.data_type.copy()
-                    if isinstance(data_type.parent, self.data_model_field_type):
-                        # for a field: fill the empty field from the root-type field
-                        model_field.extras = {
-                            **root_type_field.extras,
-                            **model_field.extras,
-                        }
-                        model_field.process_const()
-
-                        if self.field_constraints:
-                            model_field.constraints = ConstraintsBase.merge_constraints(
-                                root_type_field.constraints, model_field.constraints
-                            )
-
-                        data_type.parent.data_type = copied_data_type
-
-                    elif data_type.parent.is_list:
-                        if self.field_constraints:
-                            model_field.constraints = ConstraintsBase.merge_constraints(
-                                root_type_field.constraints, model_field.constraints
-                            )
-                        if isinstance(
-                            root_type_field,
-                            pydantic_model.DataModelField,
-                        ) and not model_field.extras.get('discriminator'):
-                            discriminator = root_type_field.extras.get('discriminator')
-                            if discriminator:
-                                model_field.extras['discriminator'] = discriminator
-                        data_type.parent.data_types.remove(
-                            data_type
-                        )  # pragma: no cover
-                        data_type.parent.data_types.append(copied_data_type)
-
-                    elif isinstance(data_type.parent, DataType):
-                        # for a nested data type: swap the reference for the copied type
-                        data_type_id = id(data_type)
-                        data_type.parent.data_types = [
-                            d
-                            for d in (*data_type.parent.data_types, copied_data_type)
-                            if id(d) != data_type_id
-                        ]
-                    else:  # pragma: no cover
-                        continue
-
-                    for d in root_type_field.data_type.data_types:
-                        if d.reference is None:
-                            continue
-                        from_, import_ = full_path = relative(
-                            model.module_name, d.full_name
-                        )
-                        if from_ and import_:
-                            alias = scoped_model_resolver.add(full_path, import_)
-                            d.alias = (
-                                alias.name
-                                if d.reference.short_name == import_
-                                else f'{alias.name}.{d.reference.short_name}'
-                            )
-                            imports.append(
-                                [
-                                    Import(
-                                        from_=from_,
-                                        import_=import_,
-                                        alias=alias.name,
-                                        reference_path=d.reference.path,
-                                    )
-                                ]
-                            )
-
-                    original_field = get_most_of_parent(data_type, DataModelFieldBase)
-                    if original_field:  # pragma: no cover
-                        # TODO: Improve detection of reference type
-                        imports.append(original_field.imports)
-
-                    data_type.remove_reference()
-
-                    root_type_model.reference.children = [
-                        c
-                        for c in root_type_model.reference.children
-                        if getattr(c, 'parent', None)
-                    ]
-
-                    imports.remove_referenced_imports(root_type_model.path)
-                    if not root_type_model.reference.children:
-                        unused_models.append(root_type_model)
-
-    def __set_default_enum_member(
-        self,
-        models: List[DataModel],
-    ) -> None:
-        if not self.set_default_enum_member:
-            return None
-        for model in models:
-            for model_field in model.fields:
-                if not model_field.default:
-                    continue
-                for data_type in model_field.data_type.all_data_types:
-                    if data_type.reference and isinstance(
-                        data_type.reference.source, Enum
-                    ):  # pragma: no cover
-                        if isinstance(model_field.default, list):
-                            enum_member: Union[List[Member], Optional[Member]] = [
-                                e
-                                for e in (
-                                    data_type.reference.source.find_member(d)
-                                    for d in model_field.default
-                                )
-                                if e
-                            ]
-                        else:
-                            enum_member = data_type.reference.source.find_member(
-                                model_field.default
-                            )
-                        if not enum_member:
-                            continue
-                        model_field.default = enum_member
-                        if data_type.alias:
-                            if isinstance(enum_member, list):
-                                for enum_member_ in enum_member:
-                                    enum_member_.alias = data_type.alias
-                            else:
-                                enum_member.alias = data_type.alias
-
-    def __override_required_field(
-        self,
-        models: List[DataModel],
-    ) -> None:
-        for model in models:
-            if isinstance(model, (Enum, self.data_model_root_type)):
-                continue
-            for index, model_field in enumerate(model.fields[:]):
-                data_type = model_field.data_type
-                if (
-                    not model_field.original_name
-                    or data_type.data_types
-                    or data_type.reference
-                    or data_type.type
-                    or data_type.literals
-                    or data_type.dict_key
-                ):
-                    continue
-
-                original_field = _find_field(
-                    model_field.original_name, _find_base_classes(model)
-                )
-                if not original_field:  # pragma: no cover
-                    model.fields.remove(model_field)
-                    continue
-                copied_original_field = original_field.copy()
-                if original_field.data_type.reference:
-                    data_type = self.data_type_manager.data_type(
-                        reference=original_field.data_type.reference,
-                    )
-                elif original_field.data_type.data_types:
-                    data_type = original_field.data_type.copy()
-                    data_type.data_types = _copy_data_types(
-                        original_field.data_type.data_types
-                    )
-                    for data_type_ in data_type.data_types:
-                        data_type_.parent = data_type
-                else:
-                    data_type = original_field.data_type.copy()
-                data_type.parent = copied_original_field
-                copied_original_field.data_type = data_type
-                copied_original_field.parent = model
-                copied_original_field.required = True
-                model.fields.insert(index, copied_original_field)
-                model.fields.remove(model_field)
-
-    def __sort_models(
-        self,
-        models: List[DataModel],
-        imports: Imports,
-    ) -> None:
-        if not self.keep_model_order:
-            return
-
-        models.sort(key=lambda x: x.class_name)
-
-        imported = {i for v in imports.values() for i in v}
-        model_class_name_baseclasses: Dict[DataModel, Tuple[str, Set[str]]] = {}
-        for model in models:
-            class_name = model.class_name
-            model_class_name_baseclasses[model] = (
-                class_name,
-                {b.type_hint for b in model.base_classes if b.reference} - {class_name},
-            )
-
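-        # Repeatedly bubble a model past its successor until every model
-        # appears after the base classes it depends on.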
-        changed: bool = True
-        while changed:
-            changed = False
-            resolved = imported.copy()
-            for i in range(len(models) - 1):
-                model = models[i]
-                class_name, baseclasses = model_class_name_baseclasses[model]
-                if not baseclasses - resolved:
-                    resolved.add(class_name)
-                    continue
-                models[i], models[i + 1] = models[i + 1], model
-                changed = True
-
-    def __set_one_literal_on_default(self, models: List[DataModel]) -> None:
-        if not self.use_one_literal_as_default:
-            return None
-        for model in models:
-            for model_field in model.fields:
-                if not model_field.required or len(model_field.data_type.literals) != 1:
-                    continue
-                model_field.default = model_field.data_type.literals[0]
-                model_field.required = False
-                if model_field.nullable is not True:  # pragma: no cover
-                    model_field.nullable = False
-
-    @classmethod
-    def __postprocess_result_modules(cls, results):
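-        # When dots are treated as modules, rebuild the result keys as nested
-        # packages and reuse the existing __init__.py body for every package folder.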
-        def process(input_tuple) -> Tuple[str, ...]:
-            r = []
-            for item in input_tuple:
-                p = item.split('.')
-                if len(p) > 1:
-                    r.extend(p[:-1])
-                    r.append(p[-1])
-                else:
-                    r.append(item)
-
-            r = r[:-2] + [f'{r[-2]}.{r[-1]}']
-            return tuple(r)
-
-        results = {process(k): v for k, v in results.items()}
-
-        init_result = [v for k, v in results.items() if k[-1] == '__init__.py'][0]
-        folders = {t[:-1] if t[-1].endswith('.py') else t for t in results.keys()}
-        for folder in folders:
-            for i in range(len(folder)):
-                subfolder = folder[: i + 1]
-                init_file = subfolder + ('__init__.py',)
-                results.update({init_file: init_result})
-        return results
-
-    def __change_imported_model_name(
-        self,
-        models: List[DataModel],
-        imports: Imports,
-        scoped_model_resolver: ModelResolver,
-    ) -> None:
-        imported_names = {
-            imports.alias[from_][i]
-            if i in imports.alias[from_] and i != imports.alias[from_][i]
-            else i
-            for from_, import_ in imports.items()
-            for i in import_
-        }
-        for model in models:
-            if model.class_name not in imported_names:  # pragma: no cover
-                continue
-
-            model.reference.name = scoped_model_resolver.add(  # pragma: no cover
-                path=get_special_path('imported_name', model.path.split('/')),
-                original_name=model.reference.name,
-                unique=True,
-                class_name=True,
-            ).name
-
-    def parse(
-        self,
-        with_import: Optional[bool] = True,
-        format_: Optional[bool] = True,
-        settings_path: Optional[Path] = None,
-    ) -> Union[str, Dict[Tuple[str, ...], Result]]:
-        self.parse_raw()
-
-        if with_import:
-            if self.target_python_version != PythonVersion.PY_36:
-                self.imports.append(IMPORT_ANNOTATIONS)
-
-        if format_:
-            code_formatter: Optional[CodeFormatter] = CodeFormatter(
-                self.target_python_version,
-                settings_path,
-                self.wrap_string_literal,
-                skip_string_normalization=not self.use_double_quotes,
-                known_third_party=self.known_third_party,
-                custom_formatters=self.custom_formatter,
-                custom_formatters_kwargs=self.custom_formatters_kwargs,
-            )
-        else:
-            code_formatter = None
-
-        _, sorted_data_models, require_update_action_models = sort_data_models(
-            self.results
-        )
-
-        results: Dict[Tuple[str, ...], Result] = {}
-
-        def module_key(data_model: DataModel) -> Tuple[str, ...]:
-            return tuple(data_model.module_path)
-
-        def sort_key(data_model: DataModel) -> Tuple[int, Tuple[str, ...]]:
-            return (len(data_model.module_path), tuple(data_model.module_path))
-
-        # process in reverse order to correctly establish module levels
-        grouped_models = groupby(
-            sorted(sorted_data_models.values(), key=sort_key, reverse=True),
-            key=module_key,
-        )
-
-        module_models: List[Tuple[Tuple[str, ...], List[DataModel]]] = []
-        unused_models: List[DataModel] = []
-        model_to_module_models: Dict[
-            DataModel, Tuple[Tuple[str, ...], List[DataModel]]
-        ] = {}
-        module_to_import: Dict[Tuple[str, ...], Imports] = {}
-
-        previous_module = ()  # type: Tuple[str, ...]
-        for module, models in ((k, [*v]) for k, v in grouped_models):  # type: Tuple[str, ...], List[DataModel]
-            for model in models:
-                model_to_module_models[model] = module, models
-            self.__delete_duplicate_models(models)
-            self.__replace_duplicate_name_in_module(models)
-            if len(previous_module) - len(module) > 1:
-                for parts in range(len(previous_module) - 1, len(module), -1):
-                    module_models.append(
-                        (
-                            previous_module[:parts],
-                            [],
-                        )
-                    )
-            module_models.append(
-                (
-                    module,
-                    models,
-                )
-            )
-            previous_module = module
-
-        class Processed(NamedTuple):
-            module: Tuple[str, ...]
-            models: List[DataModel]
-            init: bool
-            imports: Imports
-            scoped_model_resolver: ModelResolver
-
-        processed_models: List[Processed] = []
-
-        for module, models in module_models:
-            imports = module_to_import[module] = Imports(self.use_exact_imports)
-            init = False
-            if module:
-                parent = (*module[:-1], '__init__.py')
-                if parent not in results:
-                    results[parent] = Result(body='')
-                if (*module, '__init__.py') in results:
-                    module = (*module, '__init__.py')
-                    init = True
-                else:
-                    module = (*module[:-1], f'{module[-1]}.py')
-                    module = tuple(part.replace('-', '_') for part in module)
-            else:
-                module = ('__init__.py',)
-
-            scoped_model_resolver = ModelResolver()
-
-            self.__override_required_field(models)
-            self.__replace_unique_list_to_set(models)
-            self.__change_from_import(models, imports, scoped_model_resolver, init)
-            self.__extract_inherited_enum(models)
-            self.__set_reference_default_value_to_field(models)
-            self.__reuse_model(models, require_update_action_models)
-            self.__collapse_root_models(
-                models, unused_models, imports, scoped_model_resolver
-            )
-            self.__set_default_enum_member(models)
-            self.__sort_models(models, imports)
-            self.__apply_discriminator_type(models, imports)
-            self.__set_one_literal_on_default(models)
-
-            processed_models.append(
-                Processed(module, models, init, imports, scoped_model_resolver)
-            )
-
-        for processed_model in processed_models:
-            for model in processed_model.models:
-                processed_model.imports.append(model.imports)
-
-        for unused_model in unused_models:
-            module, models = model_to_module_models[unused_model]
-            if unused_model in models:  # pragma: no cover
-                imports = module_to_import[module]
-                imports.remove(unused_model.imports)
-                models.remove(unused_model)
-
-        for processed_model in processed_models:
-            # post-process imports: drop any whose name no longer appears in the
-            # rendered code (a simple substring check)
-            model_code = str('\n'.join([str(m) for m in processed_model.models]))
-            unused_imports = [
-                (from_, import_)
-                for from_, imports_ in processed_model.imports.items()
-                for import_ in imports_
-                if import_ not in model_code
-            ]
-            for from_, import_ in unused_imports:
-                processed_model.imports.remove(Import(from_=from_, import_=import_))
-
-        for module, models, init, imports, scoped_model_resolver in processed_models:
-            # process after removing unused models
-            self.__change_imported_model_name(models, imports, scoped_model_resolver)
-
-        for module, models, init, imports, scoped_model_resolver in processed_models:
-            result: List[str] = []
-            if models:
-                if with_import:
-                    result += [str(self.imports), str(imports), '\n']
-
-                code = dump_templates(models)
-                result += [code]
-
-                if self.dump_resolve_reference_action is not None:
-                    result += [
-                        '\n',
-                        self.dump_resolve_reference_action(
-                            m.reference.short_name
-                            for m in models
-                            if m.path in require_update_action_models
-                        ),
-                    ]
-            if not result and not init:
-                continue
-            body = '\n'.join(result)
-            if code_formatter:
-                body = code_formatter.format_code(body)
-
-            results[module] = Result(
-                body=body, source=models[0].file_path if models else None
-            )
-
-        # retain existing behaviour: a single-module result is returned as plain source
-        if [*results] == [('__init__.py',)]:
-            return results[('__init__.py',)].body
-
-        results = {tuple(i.replace('-', '_') for i in k): v for k, v in results.items()}
-        results = (
-            self.__postprocess_result_modules(results)
-            if self.treat_dots_as_module
-            else {
-                tuple(
-                    (
-                        part[: part.rfind('.')].replace('.', '_')
-                        + part[part.rfind('.') :]
-                    )
-                    for part in k
-                ): v
-                for k, v in results.items()
-            }
-        )
-
-        return results
diff -pruN 0.26.4-3/datamodel_code_generator/parser/graphql.py 0.45.0-1/datamodel_code_generator/parser/graphql.py
--- 0.26.4-3/datamodel_code_generator/parser/graphql.py	2024-12-15 17:25:57.706037000 +0000
+++ 0.45.0-1/datamodel_code_generator/parser/graphql.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,538 +0,0 @@
-from __future__ import annotations
-
-from pathlib import Path
-from typing import (
-    Any,
-    Callable,
-    DefaultDict,
-    Dict,
-    Iterable,
-    Iterator,
-    List,
-    Mapping,
-    Optional,
-    Sequence,
-    Set,
-    Tuple,
-    Type,
-    Union,
-)
-from urllib.parse import ParseResult
-
-from datamodel_code_generator import (
-    DefaultPutDict,
-    LiteralType,
-    PythonVersion,
-    snooper_to_methods,
-)
-from datamodel_code_generator.model import DataModel, DataModelFieldBase
-from datamodel_code_generator.model import pydantic as pydantic_model
-from datamodel_code_generator.model.enum import Enum
-from datamodel_code_generator.model.scalar import DataTypeScalar
-from datamodel_code_generator.model.union import DataTypeUnion
-from datamodel_code_generator.parser.base import (
-    DataType,
-    Parser,
-    Source,
-    escape_characters,
-)
-from datamodel_code_generator.reference import ModelType, Reference
-from datamodel_code_generator.types import DataTypeManager, StrictTypes, Types
-
-try:
-    import graphql
-except ImportError:  # pragma: no cover
-    raise Exception(
-        "Please run `$pip install 'datamodel-code-generator[graphql]`' to generate data-model from a GraphQL schema."
-    )
-
-from datamodel_code_generator.format import DatetimeClassType
-
-graphql_resolver = graphql.type.introspection.TypeResolvers()
-
-
-def build_graphql_schema(schema_str: str) -> graphql.GraphQLSchema:
-    """Build a graphql schema from a string."""
-    schema = graphql.build_schema(schema_str)
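-    # sort lexicographically so the generated models are deterministic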
-    return graphql.lexicographic_sort_schema(schema)
-
-
-@snooper_to_methods(max_variable_length=None)
-class GraphQLParser(Parser):
-    # raw graphql schema as a `graphql-core` object
-    raw_obj: graphql.GraphQLSchema
-    # all processed graphql objects
-    # mapper from an object name (unique) to an object
-    all_graphql_objects: Dict[str, graphql.GraphQLNamedType]
-    # a reference for each object
-    # mapping from an object name to its reference
-    references: Dict[str, Reference] = {}
-    # mapping from a graphql type to all objects of that type
-    # `graphql.type.introspection.TypeKind` -- an enum with all supported types
-    # `graphql.GraphQLNamedType` -- base type for each graphql object
-    # see `graphql-core` for more details
-    support_graphql_types: Dict[
-        graphql.type.introspection.TypeKind, List[graphql.GraphQLNamedType]
-    ]
-    # render order for graphql types
-    # may become a parameter in the future
-    parse_order: List[graphql.type.introspection.TypeKind] = [
-        graphql.type.introspection.TypeKind.SCALAR,
-        graphql.type.introspection.TypeKind.ENUM,
-        graphql.type.introspection.TypeKind.INTERFACE,
-        graphql.type.introspection.TypeKind.OBJECT,
-        graphql.type.introspection.TypeKind.INPUT_OBJECT,
-        graphql.type.introspection.TypeKind.UNION,
-    ]
-
-    def __init__(
-        self,
-        source: Union[str, Path, ParseResult],
-        *,
-        data_model_type: Type[DataModel] = pydantic_model.BaseModel,
-        data_model_root_type: Type[DataModel] = pydantic_model.CustomRootType,
-        data_model_scalar_type: Type[DataModel] = DataTypeScalar,
-        data_model_union_type: Type[DataModel] = DataTypeUnion,
-        data_type_manager_type: Type[DataTypeManager] = pydantic_model.DataTypeManager,
-        data_model_field_type: Type[DataModelFieldBase] = pydantic_model.DataModelField,
-        base_class: Optional[str] = None,
-        additional_imports: Optional[List[str]] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-        target_python_version: PythonVersion = PythonVersion.PY_38,
-        dump_resolve_reference_action: Optional[Callable[[Iterable[str]], str]] = None,
-        validation: bool = False,
-        field_constraints: bool = False,
-        snake_case_field: bool = False,
-        strip_default_none: bool = False,
-        aliases: Optional[Mapping[str, str]] = None,
-        allow_population_by_field_name: bool = False,
-        apply_default_values_for_required_fields: bool = False,
-        allow_extra_fields: bool = False,
-        force_optional_for_required_fields: bool = False,
-        class_name: Optional[str] = None,
-        use_standard_collections: bool = False,
-        base_path: Optional[Path] = None,
-        use_schema_description: bool = False,
-        use_field_description: bool = False,
-        use_default_kwarg: bool = False,
-        reuse_model: bool = False,
-        encoding: str = 'utf-8',
-        enum_field_as_literal: Optional[LiteralType] = None,
-        set_default_enum_member: bool = False,
-        use_subclass_enum: bool = False,
-        strict_nullable: bool = False,
-        use_generic_container_types: bool = False,
-        enable_faux_immutability: bool = False,
-        remote_text_cache: Optional[DefaultPutDict[str, str]] = None,
-        disable_appending_item_suffix: bool = False,
-        strict_types: Optional[Sequence[StrictTypes]] = None,
-        empty_enum_field_name: Optional[str] = None,
-        custom_class_name_generator: Optional[Callable[[str], str]] = None,
-        field_extra_keys: Optional[Set[str]] = None,
-        field_include_all_keys: bool = False,
-        field_extra_keys_without_x_prefix: Optional[Set[str]] = None,
-        wrap_string_literal: Optional[bool] = None,
-        use_title_as_name: bool = False,
-        use_operation_id_as_name: bool = False,
-        use_unique_items_as_set: bool = False,
-        http_headers: Optional[Sequence[Tuple[str, str]]] = None,
-        http_ignore_tls: bool = False,
-        use_annotated: bool = False,
-        use_non_positive_negative_number_constrained_types: bool = False,
-        original_field_name_delimiter: Optional[str] = None,
-        use_double_quotes: bool = False,
-        use_union_operator: bool = False,
-        allow_responses_without_content: bool = False,
-        collapse_root_models: bool = False,
-        special_field_name_prefix: Optional[str] = None,
-        remove_special_field_name_prefix: bool = False,
-        capitalise_enum_members: bool = False,
-        keep_model_order: bool = False,
-        use_one_literal_as_default: bool = False,
-        known_third_party: Optional[List[str]] = None,
-        custom_formatters: Optional[List[str]] = None,
-        custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
-        use_pendulum: bool = False,
-        http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
-        treat_dots_as_module: bool = False,
-        use_exact_imports: bool = False,
-        default_field_extras: Optional[Dict[str, Any]] = None,
-        target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
-        keyword_only: bool = False,
-        no_alias: bool = False,
-    ) -> None:
-        super().__init__(
-            source=source,
-            data_model_type=data_model_type,
-            data_model_root_type=data_model_root_type,
-            data_type_manager_type=data_type_manager_type,
-            data_model_field_type=data_model_field_type,
-            base_class=base_class,
-            additional_imports=additional_imports,
-            custom_template_dir=custom_template_dir,
-            extra_template_data=extra_template_data,
-            target_python_version=target_python_version,
-            dump_resolve_reference_action=dump_resolve_reference_action,
-            validation=validation,
-            field_constraints=field_constraints,
-            snake_case_field=snake_case_field,
-            strip_default_none=strip_default_none,
-            aliases=aliases,
-            allow_population_by_field_name=allow_population_by_field_name,
-            allow_extra_fields=allow_extra_fields,
-            apply_default_values_for_required_fields=apply_default_values_for_required_fields,
-            force_optional_for_required_fields=force_optional_for_required_fields,
-            class_name=class_name,
-            use_standard_collections=use_standard_collections,
-            base_path=base_path,
-            use_schema_description=use_schema_description,
-            use_field_description=use_field_description,
-            use_default_kwarg=use_default_kwarg,
-            reuse_model=reuse_model,
-            encoding=encoding,
-            enum_field_as_literal=enum_field_as_literal,
-            use_one_literal_as_default=use_one_literal_as_default,
-            set_default_enum_member=set_default_enum_member,
-            use_subclass_enum=use_subclass_enum,
-            strict_nullable=strict_nullable,
-            use_generic_container_types=use_generic_container_types,
-            enable_faux_immutability=enable_faux_immutability,
-            remote_text_cache=remote_text_cache,
-            disable_appending_item_suffix=disable_appending_item_suffix,
-            strict_types=strict_types,
-            empty_enum_field_name=empty_enum_field_name,
-            custom_class_name_generator=custom_class_name_generator,
-            field_extra_keys=field_extra_keys,
-            field_include_all_keys=field_include_all_keys,
-            field_extra_keys_without_x_prefix=field_extra_keys_without_x_prefix,
-            wrap_string_literal=wrap_string_literal,
-            use_title_as_name=use_title_as_name,
-            use_operation_id_as_name=use_operation_id_as_name,
-            use_unique_items_as_set=use_unique_items_as_set,
-            http_headers=http_headers,
-            http_ignore_tls=http_ignore_tls,
-            use_annotated=use_annotated,
-            use_non_positive_negative_number_constrained_types=use_non_positive_negative_number_constrained_types,
-            original_field_name_delimiter=original_field_name_delimiter,
-            use_double_quotes=use_double_quotes,
-            use_union_operator=use_union_operator,
-            allow_responses_without_content=allow_responses_without_content,
-            collapse_root_models=collapse_root_models,
-            special_field_name_prefix=special_field_name_prefix,
-            remove_special_field_name_prefix=remove_special_field_name_prefix,
-            capitalise_enum_members=capitalise_enum_members,
-            keep_model_order=keep_model_order,
-            known_third_party=known_third_party,
-            custom_formatters=custom_formatters,
-            custom_formatters_kwargs=custom_formatters_kwargs,
-            use_pendulum=use_pendulum,
-            http_query_parameters=http_query_parameters,
-            treat_dots_as_module=treat_dots_as_module,
-            use_exact_imports=use_exact_imports,
-            default_field_extras=default_field_extras,
-            target_datetime_class=target_datetime_class,
-            keyword_only=keyword_only,
-            no_alias=no_alias,
-        )
-
-        self.data_model_scalar_type = data_model_scalar_type
-        self.data_model_union_type = data_model_union_type
-        self.use_standard_collections = use_standard_collections
-        self.use_union_operator = use_union_operator
-
-    def _get_context_source_path_parts(self) -> Iterator[Tuple[Source, List[str]]]:
-        # TODO (denisart): this method temporarily duplicates
-        # `datamodel_code_generator.parser.jsonschema.JsonSchemaParser._get_context_source_path_parts`.
-
-        if isinstance(self.source, list) or (  # pragma: no cover
-            isinstance(self.source, Path) and self.source.is_dir()
-        ):  # pragma: no cover
-            self.current_source_path = Path()
-            self.model_resolver.after_load_files = {
-                self.base_path.joinpath(s.path).resolve().as_posix()
-                for s in self.iter_source
-            }
-
-        for source in self.iter_source:
-            if isinstance(self.source, ParseResult):  # pragma: no cover
-                path_parts = self.get_url_path_parts(self.source)
-            else:
-                path_parts = list(source.path.parts)
-            if self.current_source_path is not None:  # pragma: no cover
-                self.current_source_path = source.path
-            with self.model_resolver.current_base_path_context(
-                source.path.parent
-            ), self.model_resolver.current_root_context(path_parts):
-                yield source, path_parts
-
-    def _resolve_types(self, paths: List[str], schema: graphql.GraphQLSchema) -> None:
-        for type_name, type_ in schema.type_map.items():
-            if type_name.startswith('__'):
-                continue
-
-            if type_name in ['Query', 'Mutation']:
-                continue
-
-            resolved_type = graphql_resolver.kind(type_, None)
-
-            if resolved_type in self.support_graphql_types:  # pragma: no cover
-                self.all_graphql_objects[type_.name] = type_
-                # TODO: needs a special method for each GraphQL type
-                self.references[type_.name] = Reference(
-                    path=f'{str(*paths)}/{resolved_type.value}/{type_.name}',
-                    name=type_.name,
-                    original_name=type_.name,
-                )
-
-                self.support_graphql_types[resolved_type].append(type_)
-
-    def _typename_field(self, name: str) -> DataModelFieldBase:
-        return self.data_model_field_type(
-            name='typename__',
-            data_type=DataType(
-                literals=[name],
-                use_union_operator=self.use_union_operator,
-                use_standard_collections=self.use_standard_collections,
-            ),
-            default=name,
-            use_annotated=self.use_annotated,
-            required=False,
-            alias='__typename',
-            use_one_literal_as_default=True,
-            has_default=True,
-        )
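A minimal sketch of what this discriminator field renders to in a generated model, assuming a GraphQL object type named `Person` (the class is hypothetical; only the field shape follows the arguments above):

```python
from typing import Literal

from pydantic import BaseModel, Field


class Person(BaseModel):
    # GraphQL's reserved `__typename` meta field; a leading double
    # underscore is not a valid pydantic field name, so the generator
    # stores it as `typename__` and restores the real name via an alias
    typename__: Literal['Person'] = Field('Person', alias='__typename')
```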
-
-    def _get_default(
-        self,
-        field: Union[graphql.GraphQLField, graphql.GraphQLInputField],
-        final_data_type: DataType,
-        required: bool,
-    ) -> Any:
-        if isinstance(field, graphql.GraphQLInputField):  # pragma: no cover
-            if field.default_value == graphql.pyutils.Undefined:  # pragma: no cover
-                return None
-            return field.default_value
-        # output fields carry no SDL default
-        return None
-
-    def parse_scalar(self, scalar_graphql_object: graphql.GraphQLScalarType) -> None:
-        self.results.append(
-            self.data_model_scalar_type(
-                reference=self.references[scalar_graphql_object.name],
-                fields=[],
-                custom_template_dir=self.custom_template_dir,
-                extra_template_data=self.extra_template_data,
-                description=scalar_graphql_object.description,
-            )
-        )
-
-    def parse_enum(self, enum_object: graphql.GraphQLEnumType) -> None:
-        enum_fields: List[DataModelFieldBase] = []
-        exclude_field_names: Set[str] = set()
-
-        for value_name, value in enum_object.values.items():
-            default = (
-                f"'{value_name.translate(escape_characters)}'"
-                if isinstance(value_name, str)
-                else value_name
-            )
-
-            field_name = self.model_resolver.get_valid_field_name(
-                value_name, excludes=exclude_field_names, model_type=ModelType.ENUM
-            )
-            exclude_field_names.add(field_name)
-
-            enum_fields.append(
-                self.data_model_field_type(
-                    name=field_name,
-                    data_type=self.data_type_manager.get_data_type(
-                        Types.string,
-                    ),
-                    default=default,
-                    required=True,
-                    strip_default_none=self.strip_default_none,
-                    has_default=True,
-                    use_field_description=value.description is not None,
-                    original_name=None,
-                )
-            )
-
-        enum = Enum(
-            reference=self.references[enum_object.name],
-            fields=enum_fields,
-            path=self.current_source_path,
-            description=enum_object.description,
-            custom_template_dir=self.custom_template_dir,
-        )
-        self.results.append(enum)
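For illustration, an SDL enum like `enum Color { RED GREEN }` passes through this method roughly as follows (a sketch of the generated output; actual rendering goes through the `Enum` template):

```python
from enum import Enum


class Color(Enum):
    # each member's default is the quoted value name, escaped through
    # `escape_characters` before it is written into the template
    RED = 'RED'
    GREEN = 'GREEN'
```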
-
-    def parse_field(
-        self,
-        field_name: str,
-        alias: str,
-        field: Union[graphql.GraphQLField, graphql.GraphQLInputField],
-    ) -> DataModelFieldBase:
-        final_data_type = DataType(
-            is_optional=True,
-            use_union_operator=self.use_union_operator,
-            use_standard_collections=self.use_standard_collections,
-        )
-        data_type = final_data_type
-        obj = field.type
-
-        while graphql.is_list_type(obj) or graphql.is_non_null_type(obj):
-            if graphql.is_list_type(obj):
-                data_type.is_list = True
-
-                new_data_type = DataType(
-                    is_optional=True,
-                    use_union_operator=self.use_union_operator,
-                    use_standard_collections=self.use_standard_collections,
-                )
-                data_type.data_types = [new_data_type]
-
-                data_type = new_data_type
-            elif graphql.is_non_null_type(obj):  # pragma: no cover
-                data_type.is_optional = False
-
-            obj = obj.of_type
-
-        data_type.type = obj.name
-
-        required = (not self.force_optional_for_required_fields) and (
-            not final_data_type.is_optional
-        )
-
-        default = self._get_default(field, final_data_type, required)
-        extras = (
-            {}
-            if self.default_field_extras is None
-            else self.default_field_extras.copy()
-        )
-
-        if field.description is not None:  # pragma: no cover
-            extras['description'] = field.description
-
-        return self.data_model_field_type(
-            name=field_name,
-            default=default,
-            data_type=final_data_type,
-            required=required,
-            extras=extras,
-            alias=alias,
-            strip_default_none=self.strip_default_none,
-            use_annotated=self.use_annotated,
-            use_field_description=self.use_field_description,
-            use_default_kwarg=self.use_default_kwarg,
-            original_name=field_name,
-            has_default=default is not None,
-        )
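The unwrapping loop above is easiest to read against concrete SDL type modifiers; a sketch using graphql-core's wrapper types (the comments show the resulting Python annotations under default settings):

```python
import graphql

# NonNull clears `is_optional` on the DataType currently being built;
# List sets `is_list` and descends into a fresh optional inner DataType.
non_null_list_of_non_null = graphql.GraphQLNonNull(
    graphql.GraphQLList(graphql.GraphQLNonNull(graphql.GraphQLString))
)
# [String!]!  ->  List[str]            (required, no Optional anywhere)
# [String!]   ->  Optional[List[str]]  (outer NonNull missing)
# [String]!   ->  List[Optional[str]]  (inner item stays optional)
```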
-
-    def parse_object_like(
-        self,
-        obj: Union[
-            graphql.GraphQLInterfaceType,
-            graphql.GraphQLObjectType,
-            graphql.GraphQLInputObjectType,
-        ],
-    ) -> None:
-        fields = []
-        exclude_field_names: Set[str] = set()
-
-        for field_name, field in obj.fields.items():
-            field_name_, alias = self.model_resolver.get_valid_field_name_and_alias(
-                field_name, excludes=exclude_field_names
-            )
-            exclude_field_names.add(field_name_)
-
-            data_model_field_type = self.parse_field(field_name_, alias, field)
-            fields.append(data_model_field_type)
-
-        fields.append(self._typename_field(obj.name))
-
-        base_classes = []
-        if hasattr(obj, 'interfaces'):  # pragma: no cover
-            base_classes = [self.references[i.name] for i in obj.interfaces]
-
-        data_model_type = self.data_model_type(
-            reference=self.references[obj.name],
-            fields=fields,
-            base_classes=base_classes,
-            custom_base_class=self.base_class,
-            custom_template_dir=self.custom_template_dir,
-            extra_template_data=self.extra_template_data,
-            path=self.current_source_path,
-            description=obj.description,
-            keyword_only=self.keyword_only,
-        )
-        self.results.append(data_model_type)
-
-    def parse_interface(
-        self, interface_graphql_object: graphql.GraphQLInterfaceType
-    ) -> None:
-        self.parse_object_like(interface_graphql_object)
-
-    def parse_object(self, graphql_object: graphql.GraphQLObjectType) -> None:
-        self.parse_object_like(graphql_object)
-
-    def parse_input_object(
-        self, input_graphql_object: graphql.GraphQLInputObjectType
-    ) -> None:
-        self.parse_object_like(input_graphql_object)  # pragma: no cover
-
-    def parse_union(self, union_object: graphql.GraphQLUnionType) -> None:
-        fields = []
-
-        for type_ in union_object.types:
-            fields.append(
-                self.data_model_field_type(name=type_.name, data_type=DataType())
-            )
-
-        data_model_type = self.data_model_union_type(
-            reference=self.references[union_object.name],
-            fields=fields,
-            custom_base_class=self.base_class,
-            custom_template_dir=self.custom_template_dir,
-            extra_template_data=self.extra_template_data,
-            path=self.current_source_path,
-            description=union_object.description,
-        )
-        self.results.append(data_model_type)
-
-    def parse_raw(self) -> None:
-        self.all_graphql_objects = {}
-        self.references: Dict[str, Reference] = {}
-
-        self.support_graphql_types = {
-            graphql.type.introspection.TypeKind.SCALAR: [],
-            graphql.type.introspection.TypeKind.ENUM: [],
-            graphql.type.introspection.TypeKind.UNION: [],
-            graphql.type.introspection.TypeKind.INTERFACE: [],
-            graphql.type.introspection.TypeKind.OBJECT: [],
-            graphql.type.introspection.TypeKind.INPUT_OBJECT: [],
-        }
-
-        # may become a parameter in the future
-        _mapper_from_graphql_type_to_parser_method = {
-            graphql.type.introspection.TypeKind.SCALAR: self.parse_scalar,
-            graphql.type.introspection.TypeKind.ENUM: self.parse_enum,
-            graphql.type.introspection.TypeKind.INTERFACE: self.parse_interface,
-            graphql.type.introspection.TypeKind.OBJECT: self.parse_object,
-            graphql.type.introspection.TypeKind.INPUT_OBJECT: self.parse_input_object,
-            graphql.type.introspection.TypeKind.UNION: self.parse_union,
-        }
-
-        for source, path_parts in self._get_context_source_path_parts():
-            schema: graphql.GraphQLSchema = build_graphql_schema(source.text)
-            self.raw_obj = schema
-
-            self._resolve_types(path_parts, schema)
-
-            for next_type in self.parse_order:
-                for obj in self.support_graphql_types[next_type]:
-                    parser_ = _mapper_from_graphql_type_to_parser_method[next_type]
-                    parser_(obj)  # type: ignore
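A standalone sketch of the traversal `parse_raw` performs, assuming plain graphql-core (`build_graphql_schema` in the source wraps schema construction; `graphql.build_schema` is used here for illustration):

```python
import graphql

sdl = 'type Query { me: String }'
schema = graphql.build_schema(sdl)
for type_name, type_ in schema.type_map.items():
    # introspection types (`__Schema`, `__Type`, ...) and the operation
    # roots are skipped; everything else is bucketed by kind and then
    # parsed in `parse_order`
    if type_name.startswith('__') or type_name in ('Query', 'Mutation'):
        continue
    print(type_name, type_)
```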
diff -pruN 0.26.4-3/datamodel_code_generator/parser/jsonschema.py 0.45.0-1/datamodel_code_generator/parser/jsonschema.py
--- 0.26.4-3/datamodel_code_generator/parser/jsonschema.py	2024-12-15 17:25:57.706037000 +0000
+++ 0.45.0-1/datamodel_code_generator/parser/jsonschema.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,1846 +0,0 @@
-from __future__ import annotations
-
-import enum as _enum
-from collections import defaultdict
-from contextlib import contextmanager
-from functools import lru_cache
-from pathlib import Path
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    Callable,
-    ClassVar,
-    DefaultDict,
-    Dict,
-    Generator,
-    Iterable,
-    Iterator,
-    List,
-    Mapping,
-    Optional,
-    Sequence,
-    Set,
-    Tuple,
-    Type,
-    Union,
-)
-from urllib.parse import ParseResult
-from warnings import warn
-
-from pydantic import (
-    Field,
-)
-
-from datamodel_code_generator import (
-    InvalidClassNameError,
-    load_yaml,
-    load_yaml_from_path,
-    snooper_to_methods,
-)
-from datamodel_code_generator.format import PythonVersion
-from datamodel_code_generator.model import DataModel, DataModelFieldBase
-from datamodel_code_generator.model import pydantic as pydantic_model
-from datamodel_code_generator.model.base import UNDEFINED, get_module_name
-from datamodel_code_generator.model.enum import Enum
-from datamodel_code_generator.parser import DefaultPutDict, LiteralType
-from datamodel_code_generator.parser.base import (
-    SPECIAL_PATH_FORMAT,
-    Parser,
-    Source,
-    escape_characters,
-    get_special_path,
-    title_to_class_name,
-)
-from datamodel_code_generator.reference import ModelType, Reference, is_url
-from datamodel_code_generator.types import (
-    DataType,
-    DataTypeManager,
-    EmptyDataType,
-    StrictTypes,
-    Types,
-    UnionIntFloat,
-)
-from datamodel_code_generator.util import (
-    PYDANTIC_V2,
-    BaseModel,
-    cached_property,
-    field_validator,
-    model_validator,
-)
-
-if PYDANTIC_V2:
-    from pydantic import ConfigDict
-
-from datamodel_code_generator.format import DatetimeClassType
-
-
-def get_model_by_path(
-    schema: Union[Dict[str, Any], List[Any]], keys: Union[List[str], List[int]]
-) -> Dict[Any, Any]:
-    model: Union[Dict[Any, Any], List[Any]]
-    if not keys:
-        model = schema
-    elif len(keys) == 1:
-        if isinstance(schema, dict):
-            model = schema.get(keys[0], {})  # type: ignore
-        else:  # pragma: no cover
-            model = schema[int(keys[0])]
-    elif isinstance(schema, dict):
-        model = get_model_by_path(schema[keys[0]], keys[1:])  # type: ignore
-    else:
-        model = get_model_by_path(schema[int(keys[0])], keys[1:])
-    if isinstance(model, dict):
-        return model
-    raise NotImplementedError(  # pragma: no cover
-        f'JSON pointer into an array is not supported. schema={schema}, key={keys}'
-    )
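A usage sketch: the helper walks one pointer segment at a time, so a local `$ref` such as `#/definitions/Pet` resolves like this (the schema is illustrative):

```python
schema = {'definitions': {'Pet': {'type': 'object'}}}

assert get_model_by_path(schema, ['definitions', 'Pet']) == {'type': 'object'}
# a missing final segment yields an empty dict rather than raising
assert get_model_by_path(schema, ['definitions', 'Missing']) == {}
```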
-
-
-json_schema_data_formats: Dict[str, Dict[str, Types]] = {
-    'integer': {
-        'int32': Types.int32,
-        'int64': Types.int64,
-        'default': Types.integer,
-        'date-time': Types.date_time,
-        'unix-time': Types.int64,
-    },
-    'number': {
-        'float': Types.float,
-        'double': Types.double,
-        'decimal': Types.decimal,
-        'date-time': Types.date_time,
-        'time': Types.time,
-        'default': Types.number,
-    },
-    'string': {
-        'default': Types.string,
-        'byte': Types.byte,  # base64 encoded string
-        'binary': Types.binary,
-        'date': Types.date,
-        'date-time': Types.date_time,
-        'duration': Types.timedelta,
-        'time': Types.time,
-        'password': Types.password,
-        'path': Types.path,
-        'email': Types.email,
-        'idn-email': Types.email,
-        'uuid': Types.uuid,
-        'uuid1': Types.uuid1,
-        'uuid2': Types.uuid2,
-        'uuid3': Types.uuid3,
-        'uuid4': Types.uuid4,
-        'uuid5': Types.uuid5,
-        'uri': Types.uri,
-        'uri-reference': Types.string,
-        'hostname': Types.hostname,
-        'ipv4': Types.ipv4,
-        'ipv4-network': Types.ipv4_network,
-        'ipv6': Types.ipv6,
-        'ipv6-network': Types.ipv6_network,
-        'decimal': Types.decimal,
-        'integer': Types.integer,
-    },
-    'boolean': {'default': Types.boolean},
-    'object': {'default': Types.object},
-    'null': {'default': Types.null},
-    'array': {'default': Types.array},
-}
-
-
-class JSONReference(_enum.Enum):
-    LOCAL = 'LOCAL'
-    REMOTE = 'REMOTE'
-    URL = 'URL'
-
-
-class Discriminator(BaseModel):
-    propertyName: str
-    mapping: Optional[Dict[str, str]] = None
-
-
-class JsonSchemaObject(BaseModel):
-    if not TYPE_CHECKING:
-        if PYDANTIC_V2:
-
-            @classmethod
-            def get_fields(cls) -> Dict[str, Any]:
-                return cls.model_fields
-
-        else:
-
-            @classmethod
-            def get_fields(cls) -> Dict[str, Any]:
-                return cls.__fields__
-
-            @classmethod
-            def model_rebuild(cls) -> None:
-                cls.update_forward_refs()
-
-    __constraint_fields__: Set[str] = {
-        'exclusiveMinimum',
-        'minimum',
-        'exclusiveMaximum',
-        'maximum',
-        'multipleOf',
-        'minItems',
-        'maxItems',
-        'minLength',
-        'maxLength',
-        'pattern',
-        'uniqueItems',
-    }
-    __extra_key__: str = SPECIAL_PATH_FORMAT.format('extras')
-
-    @model_validator(mode='before')
-    def validate_exclusive_maximum_and_exclusive_minimum(cls, values: Any) -> Any:
-        if not isinstance(values, dict):
-            return values
-        exclusive_maximum: Union[float, bool, None] = values.get('exclusiveMaximum')
-        exclusive_minimum: Union[float, bool, None] = values.get('exclusiveMinimum')
-
-        if exclusive_maximum is True:
-            values['exclusiveMaximum'] = values['maximum']
-            del values['maximum']
-        elif exclusive_maximum is False:
-            del values['exclusiveMaximum']
-        if exclusive_minimum is True:
-            values['exclusiveMinimum'] = values['minimum']
-            del values['minimum']
-        elif exclusive_minimum is False:
-            del values['exclusiveMinimum']
-        return values
-
-    @field_validator('ref')
-    def validate_ref(cls, value: Any) -> Any:
-        if isinstance(value, str) and '#' in value:
-            if value.endswith('#/'):
-                return value[:-1]
-            elif '#/' in value or value[0] == '#' or value[-1] == '#':
-                return value
-            return value.replace('#', '#/')
-        return value
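Both validators above are small normalizations; a sketch of each, using the same `parse_obj` entry point the parser itself calls:

```python
# Draft-4 style boolean exclusiveMaximum is folded into the numeric form
obj = JsonSchemaObject.parse_obj(
    {'type': 'integer', 'maximum': 10, 'exclusiveMaximum': True}
)
assert obj.exclusiveMaximum == 10 and obj.maximum is None

# a bare '#' fragment in $ref is rewritten to JSON-pointer form
assert JsonSchemaObject.parse_obj({'$ref': 'pet.json#Pet'}).ref == 'pet.json#/Pet'
# already-pointer-style refs pass through unchanged
assert JsonSchemaObject.parse_obj({'$ref': '#/definitions/Pet'}).ref == '#/definitions/Pet'
```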
-
-    items: Union[List[JsonSchemaObject], JsonSchemaObject, bool, None] = None
-    uniqueItems: Optional[bool] = None
-    type: Union[str, List[str], None] = None
-    format: Optional[str] = None
-    pattern: Optional[str] = None
-    minLength: Optional[int] = None
-    maxLength: Optional[int] = None
-    minimum: Optional[UnionIntFloat] = None
-    maximum: Optional[UnionIntFloat] = None
-    minItems: Optional[int] = None
-    maxItems: Optional[int] = None
-    multipleOf: Optional[float] = None
-    exclusiveMaximum: Union[float, bool, None] = None
-    exclusiveMinimum: Union[float, bool, None] = None
-    additionalProperties: Union[JsonSchemaObject, bool, None] = None
-    patternProperties: Optional[Dict[str, JsonSchemaObject]] = None
-    oneOf: List[JsonSchemaObject] = []
-    anyOf: List[JsonSchemaObject] = []
-    allOf: List[JsonSchemaObject] = []
-    enum: List[Any] = []
-    writeOnly: Optional[bool] = None
-    readOnly: Optional[bool] = None
-    properties: Optional[Dict[str, Union[JsonSchemaObject, bool]]] = None
-    required: List[str] = []
-    ref: Optional[str] = Field(default=None, alias='$ref')
-    nullable: Optional[bool] = False
-    x_enum_varnames: List[str] = Field(default=[], alias='x-enum-varnames')
-    description: Optional[str] = None
-    title: Optional[str] = None
-    example: Any = None
-    examples: Any = None
-    default: Any = None
-    id: Optional[str] = Field(default=None, alias='$id')
-    custom_type_path: Optional[str] = Field(default=None, alias='customTypePath')
-    custom_base_path: Optional[str] = Field(default=None, alias='customBasePath')
-    extras: Dict[str, Any] = Field(alias=__extra_key__, default_factory=dict)
-    discriminator: Union[Discriminator, str, None] = None
-    if PYDANTIC_V2:
-        model_config = ConfigDict(
-            arbitrary_types_allowed=True,
-            ignored_types=(cached_property,),
-        )
-    else:
-
-        class Config:
-            arbitrary_types_allowed = True
-            keep_untouched = (cached_property,)
-            smart_casts = True
-
-    if not TYPE_CHECKING:
-
-        def __init__(self, **data: Any) -> None:
-            super().__init__(**data)
-            self.extras = {k: v for k, v in data.items() if k not in EXCLUDE_FIELD_KEYS}
-            if 'const' in data.get(self.__extra_key__, {}):
-                self.extras['const'] = data[self.__extra_key__]['const']
-
-    @cached_property
-    def is_object(self) -> bool:
-        return (
-            self.properties is not None
-            or self.type == 'object'
-            and not self.allOf
-            and not self.oneOf
-            and not self.anyOf
-            and not self.ref
-        )
-
-    @cached_property
-    def is_array(self) -> bool:
-        return self.items is not None or self.type == 'array'
-
-    @cached_property
-    def ref_object_name(self) -> str:  # pragma: no cover
-        return self.ref.rsplit('/', 1)[-1]  # type: ignore
-
-    @field_validator('items', mode='before')
-    def validate_items(cls, values: Any) -> Any:
-        # normalize a falsy value (e.g. an empty dict) to None
-        return values or None
-
-    @cached_property
-    def has_default(self) -> bool:
-        return 'default' in self.__fields_set__ or 'default_factory' in self.extras
-
-    @cached_property
-    def has_constraint(self) -> bool:
-        return bool(self.__constraint_fields__ & self.__fields_set__)
-
-    @cached_property
-    def ref_type(self) -> Optional[JSONReference]:
-        if self.ref:
-            return get_ref_type(self.ref)
-        return None  # pragma: no cover
-
-    @cached_property
-    def type_has_null(self) -> bool:
-        return isinstance(self.type, list) and 'null' in self.type
-
-
-@lru_cache()
-def get_ref_type(ref: str) -> JSONReference:
-    if ref[0] == '#':
-        return JSONReference.LOCAL
-    elif is_url(ref):
-        return JSONReference.URL
-    return JSONReference.REMOTE
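How a `$ref` string is classified, assuming `is_url` recognizes the scheme-prefixed form:

```python
assert get_ref_type('#/definitions/Pet') is JSONReference.LOCAL
assert get_ref_type('https://example.com/pet.json#/Pet') is JSONReference.URL
assert get_ref_type('other.json#/Pet') is JSONReference.REMOTE
```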
-
-
-def _get_type(type_: str, format__: Optional[str] = None) -> Types:
-    if type_ not in json_schema_data_formats:
-        return Types.any
-    data_format: Optional[Types] = json_schema_data_formats[type_].get(
-        'default' if format__ is None else format__
-    )
-    if data_format is not None:
-        return data_format
-
-    warn(f'format of {format__!r} not understood for {type_!r} - using default')
-    return json_schema_data_formats[type_]['default']
-
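A few concrete lookups through the table above, including the warning fallback for an unrecognized format:

```python
from datamodel_code_generator.types import Types

assert _get_type('string', 'date-time') is Types.date_time
assert _get_type('integer') is Types.integer
# unknown formats warn and fall back to the type's 'default' entry
assert _get_type('string', 'no-such-format') is Types.string
```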
-
-JsonSchemaObject.model_rebuild()
-
-DEFAULT_FIELD_KEYS: Set[str] = {
-    'example',
-    'examples',
-    'description',
-    'discriminator',
-    'title',
-    'const',
-    'default_factory',
-}
-
-EXCLUDE_FIELD_KEYS_IN_JSON_SCHEMA: Set[str] = {
-    'readOnly',
-    'writeOnly',
-}
-
-EXCLUDE_FIELD_KEYS = (
-    set(JsonSchemaObject.get_fields())
-    - DEFAULT_FIELD_KEYS
-    - EXCLUDE_FIELD_KEYS_IN_JSON_SCHEMA
-) | {
-    '$id',
-    '$ref',
-    JsonSchemaObject.__extra_key__,
-}
-
-
-@snooper_to_methods(max_variable_length=None)
-class JsonSchemaParser(Parser):
-    SCHEMA_PATHS: ClassVar[List[str]] = ['#/definitions', '#/$defs']
-    SCHEMA_OBJECT_TYPE: ClassVar[Type[JsonSchemaObject]] = JsonSchemaObject
-
-    def __init__(
-        self,
-        source: Union[str, Path, List[Path], ParseResult],
-        *,
-        data_model_type: Type[DataModel] = pydantic_model.BaseModel,
-        data_model_root_type: Type[DataModel] = pydantic_model.CustomRootType,
-        data_type_manager_type: Type[DataTypeManager] = pydantic_model.DataTypeManager,
-        data_model_field_type: Type[DataModelFieldBase] = pydantic_model.DataModelField,
-        base_class: Optional[str] = None,
-        additional_imports: Optional[List[str]] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-        target_python_version: PythonVersion = PythonVersion.PY_38,
-        dump_resolve_reference_action: Optional[Callable[[Iterable[str]], str]] = None,
-        validation: bool = False,
-        field_constraints: bool = False,
-        snake_case_field: bool = False,
-        strip_default_none: bool = False,
-        aliases: Optional[Mapping[str, str]] = None,
-        allow_population_by_field_name: bool = False,
-        apply_default_values_for_required_fields: bool = False,
-        allow_extra_fields: bool = False,
-        force_optional_for_required_fields: bool = False,
-        class_name: Optional[str] = None,
-        use_standard_collections: bool = False,
-        base_path: Optional[Path] = None,
-        use_schema_description: bool = False,
-        use_field_description: bool = False,
-        use_default_kwarg: bool = False,
-        reuse_model: bool = False,
-        encoding: str = 'utf-8',
-        enum_field_as_literal: Optional[LiteralType] = None,
-        use_one_literal_as_default: bool = False,
-        set_default_enum_member: bool = False,
-        use_subclass_enum: bool = False,
-        strict_nullable: bool = False,
-        use_generic_container_types: bool = False,
-        enable_faux_immutability: bool = False,
-        remote_text_cache: Optional[DefaultPutDict[str, str]] = None,
-        disable_appending_item_suffix: bool = False,
-        strict_types: Optional[Sequence[StrictTypes]] = None,
-        empty_enum_field_name: Optional[str] = None,
-        custom_class_name_generator: Optional[Callable[[str], str]] = None,
-        field_extra_keys: Optional[Set[str]] = None,
-        field_include_all_keys: bool = False,
-        field_extra_keys_without_x_prefix: Optional[Set[str]] = None,
-        wrap_string_literal: Optional[bool] = None,
-        use_title_as_name: bool = False,
-        use_operation_id_as_name: bool = False,
-        use_unique_items_as_set: bool = False,
-        http_headers: Optional[Sequence[Tuple[str, str]]] = None,
-        http_ignore_tls: bool = False,
-        use_annotated: bool = False,
-        use_non_positive_negative_number_constrained_types: bool = False,
-        original_field_name_delimiter: Optional[str] = None,
-        use_double_quotes: bool = False,
-        use_union_operator: bool = False,
-        allow_responses_without_content: bool = False,
-        collapse_root_models: bool = False,
-        special_field_name_prefix: Optional[str] = None,
-        remove_special_field_name_prefix: bool = False,
-        capitalise_enum_members: bool = False,
-        keep_model_order: bool = False,
-        known_third_party: Optional[List[str]] = None,
-        custom_formatters: Optional[List[str]] = None,
-        custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
-        use_pendulum: bool = False,
-        http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
-        treat_dots_as_module: bool = False,
-        use_exact_imports: bool = False,
-        default_field_extras: Optional[Dict[str, Any]] = None,
-        target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
-        keyword_only: bool = False,
-        no_alias: bool = False,
-    ) -> None:
-        super().__init__(
-            source=source,
-            data_model_type=data_model_type,
-            data_model_root_type=data_model_root_type,
-            data_type_manager_type=data_type_manager_type,
-            data_model_field_type=data_model_field_type,
-            base_class=base_class,
-            additional_imports=additional_imports,
-            custom_template_dir=custom_template_dir,
-            extra_template_data=extra_template_data,
-            target_python_version=target_python_version,
-            dump_resolve_reference_action=dump_resolve_reference_action,
-            validation=validation,
-            field_constraints=field_constraints,
-            snake_case_field=snake_case_field,
-            strip_default_none=strip_default_none,
-            aliases=aliases,
-            allow_population_by_field_name=allow_population_by_field_name,
-            allow_extra_fields=allow_extra_fields,
-            apply_default_values_for_required_fields=apply_default_values_for_required_fields,
-            force_optional_for_required_fields=force_optional_for_required_fields,
-            class_name=class_name,
-            use_standard_collections=use_standard_collections,
-            base_path=base_path,
-            use_schema_description=use_schema_description,
-            use_field_description=use_field_description,
-            use_default_kwarg=use_default_kwarg,
-            reuse_model=reuse_model,
-            encoding=encoding,
-            enum_field_as_literal=enum_field_as_literal,
-            use_one_literal_as_default=use_one_literal_as_default,
-            set_default_enum_member=set_default_enum_member,
-            use_subclass_enum=use_subclass_enum,
-            strict_nullable=strict_nullable,
-            use_generic_container_types=use_generic_container_types,
-            enable_faux_immutability=enable_faux_immutability,
-            remote_text_cache=remote_text_cache,
-            disable_appending_item_suffix=disable_appending_item_suffix,
-            strict_types=strict_types,
-            empty_enum_field_name=empty_enum_field_name,
-            custom_class_name_generator=custom_class_name_generator,
-            field_extra_keys=field_extra_keys,
-            field_include_all_keys=field_include_all_keys,
-            field_extra_keys_without_x_prefix=field_extra_keys_without_x_prefix,
-            wrap_string_literal=wrap_string_literal,
-            use_title_as_name=use_title_as_name,
-            use_operation_id_as_name=use_operation_id_as_name,
-            use_unique_items_as_set=use_unique_items_as_set,
-            http_headers=http_headers,
-            http_ignore_tls=http_ignore_tls,
-            use_annotated=use_annotated,
-            use_non_positive_negative_number_constrained_types=use_non_positive_negative_number_constrained_types,
-            original_field_name_delimiter=original_field_name_delimiter,
-            use_double_quotes=use_double_quotes,
-            use_union_operator=use_union_operator,
-            allow_responses_without_content=allow_responses_without_content,
-            collapse_root_models=collapse_root_models,
-            special_field_name_prefix=special_field_name_prefix,
-            remove_special_field_name_prefix=remove_special_field_name_prefix,
-            capitalise_enum_members=capitalise_enum_members,
-            keep_model_order=keep_model_order,
-            known_third_party=known_third_party,
-            custom_formatters=custom_formatters,
-            custom_formatters_kwargs=custom_formatters_kwargs,
-            use_pendulum=use_pendulum,
-            http_query_parameters=http_query_parameters,
-            treat_dots_as_module=treat_dots_as_module,
-            use_exact_imports=use_exact_imports,
-            default_field_extras=default_field_extras,
-            target_datetime_class=target_datetime_class,
-            keyword_only=keyword_only,
-            no_alias=no_alias,
-        )
-
-        self.remote_object_cache: DefaultPutDict[str, Dict[str, Any]] = DefaultPutDict()
-        self.raw_obj: Dict[Any, Any] = {}
-        self._root_id: Optional[str] = None
-        self._root_id_base_path: Optional[str] = None
-        self.reserved_refs: DefaultDict[Tuple[str], Set[str]] = defaultdict(set)
-        self.field_keys: Set[str] = {
-            *DEFAULT_FIELD_KEYS,
-            *self.field_extra_keys,
-            *self.field_extra_keys_without_x_prefix,
-        }
-
-        if self.data_model_field_type.can_have_extra_keys:
-            self.get_field_extra_key: Callable[[str], str] = (
-                lambda key: self.model_resolver.get_valid_field_name_and_alias(key)[0]
-            )
-        else:
-            self.get_field_extra_key = lambda key: key
-
-    def get_field_extras(self, obj: JsonSchemaObject) -> Dict[str, Any]:
-        if self.field_include_all_keys:
-            extras = {
-                self.get_field_extra_key(
-                    # strip the literal 'x-' prefix; str.lstrip('x-') would
-                    # strip a character set and mangle names like 'x-xray'
-                    k[len('x-'):]
-                    if k in self.field_extra_keys_without_x_prefix
-                    and k.startswith('x-')
-                    else k
-                ): v
-                for k, v in obj.extras.items()
-            }
-        else:
-            extras = {
-                self.get_field_extra_key(
-                    k[len('x-'):]
-                    if k in self.field_extra_keys_without_x_prefix
-                    and k.startswith('x-')
-                    else k
-                ): v
-                for k, v in obj.extras.items()
-                if k in self.field_keys
-            }
-        if self.default_field_extras:
-            extras.update(self.default_field_extras)
-        return extras
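A sketch of the opt-in prefix handling, with a hypothetical helper mirroring the key expression above (`keys_without_prefix` stands in for `field_extra_keys_without_x_prefix`):

```python
keys_without_prefix = {'x-amazon'}


def strip_x(k: str) -> str:
    # prefix removal, not str.lstrip: lstrip('x-') strips a character
    # set, which would also turn a name like 'x-xray' into 'ray'
    if k in keys_without_prefix and k.startswith('x-'):
        return k[len('x-'):]
    return k


assert strip_x('x-amazon') == 'amazon'
assert strip_x('x-other') == 'x-other'  # not opted in, kept verbatim
```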
-
-    @cached_property
-    def schema_paths(self) -> List[Tuple[str, List[str]]]:
-        return [(s, s.lstrip('#/').split('/')) for s in self.SCHEMA_PATHS]
-
-    @property
-    def root_id(self) -> Optional[str]:
-        return self.model_resolver.root_id
-
-    @root_id.setter
-    def root_id(self, value: Optional[str]) -> None:
-        self.model_resolver.set_root_id(value)
-
-    def should_parse_enum_as_literal(self, obj: JsonSchemaObject) -> bool:
-        return self.enum_field_as_literal == LiteralType.All or (
-            self.enum_field_as_literal == LiteralType.One and len(obj.enum) == 1
-        )
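Concretely, with `--enum-field-as-literal one` only single-member enums collapse to a `Literal`, while `all` collapses every enum (a comment sketch of the decision, not the rendered output):

```python
# enum_field_as_literal == LiteralType.One
#   {'enum': ['fixed']}   ->  Literal['fixed']
#   {'enum': ['a', 'b']}  ->  a generated Enum class
# enum_field_as_literal == LiteralType.All
#   {'enum': ['a', 'b']}  ->  Literal['a', 'b']
```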
-
-    def is_constraints_field(self, obj: JsonSchemaObject) -> bool:
-        return obj.is_array or (
-            self.field_constraints
-            and not (
-                obj.ref
-                or obj.anyOf
-                or obj.oneOf
-                or obj.allOf
-                or obj.is_object
-                or obj.enum
-            )
-        )
-
-    def get_object_field(
-        self,
-        *,
-        field_name: Optional[str],
-        field: JsonSchemaObject,
-        required: bool,
-        field_type: DataType,
-        alias: Optional[str],
-        original_field_name: Optional[str],
-    ) -> DataModelFieldBase:
-        return self.data_model_field_type(
-            name=field_name,
-            default=field.default,
-            data_type=field_type,
-            required=required,
-            alias=alias,
-            constraints=field.dict() if self.is_constraints_field(field) else None,
-            nullable=field.nullable
-            if self.strict_nullable and (field.has_default or required)
-            else None,
-            strip_default_none=self.strip_default_none,
-            extras=self.get_field_extras(field),
-            use_annotated=self.use_annotated,
-            use_field_description=self.use_field_description,
-            use_default_kwarg=self.use_default_kwarg,
-            original_name=original_field_name,
-            has_default=field.has_default,
-            type_has_null=field.type_has_null,
-        )
-
-    def get_data_type(self, obj: JsonSchemaObject) -> DataType:
-        if obj.type is None:
-            if 'const' in obj.extras:
-                return self.data_type_manager.get_data_type_from_value(
-                    obj.extras['const']
-                )
-            return self.data_type_manager.get_data_type(
-                Types.any,
-            )
-
-        def _get_data_type(type_: str, format__: str) -> DataType:
-            return self.data_type_manager.get_data_type(
-                _get_type(type_, format__),
-                **obj.dict() if not self.field_constraints else {},
-            )
-
-        if isinstance(obj.type, list):
-            return self.data_type(
-                data_types=[
-                    _get_data_type(t, obj.format or 'default')
-                    for t in obj.type
-                    if t != 'null'
-                ],
-                is_optional='null' in obj.type,
-            )
-        return _get_data_type(obj.type, obj.format or 'default')
-
-    def get_ref_data_type(self, ref: str) -> DataType:
-        reference = self.model_resolver.add_ref(ref)
-        return self.data_type(reference=reference)
-
-    def set_additional_properties(self, name: str, obj: JsonSchemaObject) -> None:
-        if isinstance(obj.additionalProperties, bool):
-            self.extra_template_data[name]['additionalProperties'] = (
-                obj.additionalProperties
-            )
-
-    def set_title(self, name: str, obj: JsonSchemaObject) -> None:
-        if obj.title:
-            self.extra_template_data[name]['title'] = obj.title
-
-    def _deep_merge(
-        self, dict1: Dict[Any, Any], dict2: Dict[Any, Any]
-    ) -> Dict[Any, Any]:
-        result = dict1.copy()
-        for key, value in dict2.items():
-            if key in result:
-                if isinstance(result[key], dict) and isinstance(value, dict):
-                    result[key] = self._deep_merge(result[key], value)
-                    continue
-                elif isinstance(result[key], list) and isinstance(value, list):
-                    result[key] = result[key] + value
-                    continue
-            result[key] = value
-        return result
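A standalone restatement of the merge rules, useful for seeing how combined-schema siblings are folded together (hypothetical `deep_merge`, equivalent to the method above):

```python
from typing import Any, Dict


def deep_merge(d1: Dict[Any, Any], d2: Dict[Any, Any]) -> Dict[Any, Any]:
    # dicts merge recursively, lists concatenate, anything else takes d2's value
    result = d1.copy()
    for key, value in d2.items():
        if isinstance(result.get(key), dict) and isinstance(value, dict):
            result[key] = deep_merge(result[key], value)
        elif isinstance(result.get(key), list) and isinstance(value, list):
            result[key] = result[key] + value
        else:
            result[key] = value
    return result


assert deep_merge({'required': ['a']}, {'required': ['b']}) == {'required': ['a', 'b']}
```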
-
-    def parse_combined_schema(
-        self,
-        name: str,
-        obj: JsonSchemaObject,
-        path: List[str],
-        target_attribute_name: str,
-    ) -> List[DataType]:
-        base_object = obj.dict(
-            exclude={target_attribute_name}, exclude_unset=True, by_alias=True
-        )
-        combined_schemas: List[JsonSchemaObject] = []
-        refs = []
-        for index, target_attribute in enumerate(
-            getattr(obj, target_attribute_name, [])
-        ):
-            if target_attribute.ref:
-                combined_schemas.append(target_attribute)
-                refs.append(index)
-                # TODO: support partial ref
-                # {
-                #   "type": "integer",
-                #   "oneOf": [
-                #     { "minimum": 5 },
-                #     { "$ref": "#/definitions/positive" }
-                #   ],
-                #    "definitions": {
-                #     "positive": {
-                #       "minimum": 0,
-                #       "exclusiveMinimum": true
-                #     }
-                #    }
-                # }
-            else:
-                combined_schemas.append(
-                    self.SCHEMA_OBJECT_TYPE.parse_obj(
-                        self._deep_merge(
-                            base_object,
-                            target_attribute.dict(exclude_unset=True, by_alias=True),
-                        )
-                    )
-                )
-
-        parsed_schemas = self.parse_list_item(
-            name,
-            combined_schemas,
-            path,
-            obj,
-            singular_name=False,
-        )
-        common_path_keyword = f'{target_attribute_name}Common'
-        return [
-            self._parse_object_common_part(
-                name,
-                obj,
-                [*get_special_path(common_path_keyword, path), str(i)],
-                ignore_duplicate_model=True,
-                fields=[],
-                base_classes=[d.reference],
-                required=[],
-            )
-            if i in refs and d.reference
-            else d
-            for i, d in enumerate(parsed_schemas)
-        ]
-
-    def parse_any_of(
-        self, name: str, obj: JsonSchemaObject, path: List[str]
-    ) -> List[DataType]:
-        return self.parse_combined_schema(name, obj, path, 'anyOf')
-
-    def parse_one_of(
-        self, name: str, obj: JsonSchemaObject, path: List[str]
-    ) -> List[DataType]:
-        return self.parse_combined_schema(name, obj, path, 'oneOf')
-
-    def _parse_object_common_part(
-        self,
-        name: str,
-        obj: JsonSchemaObject,
-        path: List[str],
-        ignore_duplicate_model: bool,
-        fields: List[DataModelFieldBase],
-        base_classes: List[Reference],
-        required: List[str],
-    ) -> DataType:
-        if obj.properties:
-            fields.extend(
-                self.parse_object_fields(obj, path, get_module_name(name, None))
-            )
-        # ignore an undetected object
-        if ignore_duplicate_model and not fields and len(base_classes) == 1:
-            with self.model_resolver.current_base_path_context(
-                self.model_resolver._base_path
-            ):
-                self.model_resolver.delete(path)
-                return self.data_type(reference=base_classes[0])
-        if required:
-            for field in fields:
-                if self.force_optional_for_required_fields or (  # pragma: no cover
-                    self.apply_default_values_for_required_fields and field.has_default
-                ):
-                    continue  # pragma: no cover
-                if (field.original_name or field.name) in required:
-                    field.required = True
-        if obj.required:
-            field_name_to_field = {f.original_name or f.name: f for f in fields}
-            for required_ in obj.required:
-                if required_ in field_name_to_field:
-                    field = field_name_to_field[required_]
-                    if self.force_optional_for_required_fields or (
-                        self.apply_default_values_for_required_fields
-                        and field.has_default
-                    ):
-                        continue
-                    field.required = True
-                else:
-                    fields.append(
-                        self.data_model_field_type(
-                            required=True, original_name=required_, data_type=DataType()
-                        )
-                    )
-        if self.use_title_as_name and obj.title:  # pragma: no cover
-            name = obj.title
-        reference = self.model_resolver.add(path, name, class_name=True, loaded=True)
-        self.set_additional_properties(reference.name, obj)
-        data_model_type = self.data_model_type(
-            reference=reference,
-            fields=fields,
-            base_classes=base_classes,
-            custom_base_class=obj.custom_base_path or self.base_class,
-            custom_template_dir=self.custom_template_dir,
-            extra_template_data=self.extra_template_data,
-            path=self.current_source_path,
-            description=obj.description if self.use_schema_description else None,
-            keyword_only=self.keyword_only,
-        )
-        self.results.append(data_model_type)
-
-        return self.data_type(reference=reference)
-
-    def _parse_all_of_item(
-        self,
-        name: str,
-        obj: JsonSchemaObject,
-        path: List[str],
-        fields: List[DataModelFieldBase],
-        base_classes: List[Reference],
-        required: List[str],
-        union_models: List[Reference],
-    ) -> None:
-        for all_of_item in obj.allOf:
-            if all_of_item.ref:  # $ref
-                base_classes.append(self.model_resolver.add_ref(all_of_item.ref))
-            else:
-                module_name = get_module_name(name, None)
-                object_fields = self.parse_object_fields(
-                    all_of_item,
-                    path,
-                    module_name,
-                )
-
-                if object_fields:
-                    fields.extend(object_fields)
-                else:
-                    if all_of_item.required:
-                        required.extend(all_of_item.required)
-                self._parse_all_of_item(
-                    name,
-                    all_of_item,
-                    path,
-                    fields,
-                    base_classes,
-                    required,
-                    union_models,
-                )
-                if all_of_item.anyOf:
-                    self.model_resolver.add(path, name, class_name=True, loaded=True)
-                    union_models.extend(
-                        d.reference
-                        for d in self.parse_any_of(name, all_of_item, path)
-                        if d.reference
-                    )
-                if all_of_item.oneOf:
-                    self.model_resolver.add(path, name, class_name=True, loaded=True)
-                    union_models.extend(
-                        d.reference
-                        for d in self.parse_one_of(name, all_of_item, path)
-                        if d.reference
-                    )
-
-    def parse_all_of(
-        self,
-        name: str,
-        obj: JsonSchemaObject,
-        path: List[str],
-        ignore_duplicate_model: bool = False,
-    ) -> DataType:
-        if len(obj.allOf) == 1 and not obj.properties:
-            single_obj = obj.allOf[0]
-            if single_obj.ref and single_obj.ref_type == JSONReference.LOCAL:
-                if get_model_by_path(self.raw_obj, single_obj.ref[2:].split('/')).get(
-                    'enum'
-                ):
-                    return self.get_ref_data_type(single_obj.ref)
-        fields: List[DataModelFieldBase] = []
-        base_classes: List[Reference] = []
-        required: List[str] = []
-        union_models: List[Reference] = []
-        self._parse_all_of_item(
-            name, obj, path, fields, base_classes, required, union_models
-        )
-        if not union_models:
-            return self._parse_object_common_part(
-                name, obj, path, ignore_duplicate_model, fields, base_classes, required
-            )
-        reference = self.model_resolver.add(path, name, class_name=True, loaded=True)
-        all_of_data_type = self._parse_object_common_part(
-            name,
-            obj,
-            get_special_path('allOf', path),
-            ignore_duplicate_model,
-            fields,
-            base_classes,
-            required,
-        )
-        data_type = self.data_type(
-            data_types=[
-                self._parse_object_common_part(
-                    name,
-                    obj,
-                    get_special_path(f'union_model-{index}', path),
-                    ignore_duplicate_model,
-                    [],
-                    [union_model, all_of_data_type.reference],  # type: ignore
-                    [],
-                )
-                for index, union_model in enumerate(union_models)
-            ]
-        )
-        field = self.get_object_field(
-            field_name=None,
-            field=obj,
-            required=True,
-            field_type=data_type,
-            alias=None,
-            original_field_name=None,
-        )
-        data_model_root = self.data_model_root_type(
-            reference=reference,
-            fields=[field],
-            custom_base_class=obj.custom_base_path or self.base_class,
-            custom_template_dir=self.custom_template_dir,
-            extra_template_data=self.extra_template_data,
-            path=self.current_source_path,
-            description=obj.description if self.use_schema_description else None,
-            nullable=obj.type_has_null,
-        )
-        self.results.append(data_model_root)
-        return self.data_type(reference=reference)
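The common `allOf` outcome, sketched: `$ref` items become base classes and inline items contribute fields (input and output here are illustrative, not a verbatim template result):

```python
dog_schema = {
    'allOf': [
        {'$ref': '#/definitions/Pet'},
        {'properties': {'age': {'type': 'integer'}}},
    ]
}
# generated, roughly:
#
#   class Dog(Pet):
#       age: Optional[int] = None
```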
-
-    def parse_object_fields(
-        self, obj: JsonSchemaObject, path: List[str], module_name: Optional[str] = None
-    ) -> List[DataModelFieldBase]:
-        properties: Dict[str, Union[JsonSchemaObject, bool]] = (
-            {} if obj.properties is None else obj.properties
-        )
-        requires: Set[str] = set(obj.required or ())
-        fields: List[DataModelFieldBase] = []
-
-        exclude_field_names: Set[str] = set()
-        for original_field_name, field in properties.items():
-            field_name, alias = self.model_resolver.get_valid_field_name_and_alias(
-                original_field_name, exclude_field_names
-            )
-            modular_name = f'{module_name}.{field_name}' if module_name else field_name
-
-            exclude_field_names.add(field_name)
-
-            if isinstance(field, bool):
-                fields.append(
-                    self.data_model_field_type(
-                        name=field_name,
-                        data_type=self.data_type_manager.get_data_type(
-                            Types.any,
-                        ),
-                        required=False
-                        if self.force_optional_for_required_fields
-                        else original_field_name in requires,
-                        alias=alias,
-                        strip_default_none=self.strip_default_none,
-                        use_annotated=self.use_annotated,
-                        use_field_description=self.use_field_description,
-                        original_name=original_field_name,
-                    )
-                )
-                continue
-
-            field_type = self.parse_item(modular_name, field, [*path, field_name])
-
-            if self.force_optional_for_required_fields or (
-                self.apply_default_values_for_required_fields and field.has_default
-            ):
-                required: bool = False
-            else:
-                required = original_field_name in requires
-            fields.append(
-                self.get_object_field(
-                    field_name=field_name,
-                    field=field,
-                    required=required,
-                    field_type=field_type,
-                    alias=alias,
-                    original_field_name=original_field_name,
-                )
-            )
-        return fields
-
-    def parse_object(
-        self,
-        name: str,
-        obj: JsonSchemaObject,
-        path: List[str],
-        singular_name: bool = False,
-        unique: bool = True,
-    ) -> DataType:
-        if not unique:  # pragma: no cover
-            warn(
-                f'{self.__class__.__name__}.parse_object() ignores the `unique` argument. '
-                'An object name must be unique. '
-                'This argument will be removed in a future version.'
-            )
-        if self.use_title_as_name and obj.title:
-            name = obj.title
-        reference = self.model_resolver.add(
-            path,
-            name,
-            class_name=True,
-            singular_name=singular_name,
-            loaded=True,
-        )
-        class_name = reference.name
-        self.set_title(class_name, obj)
-        fields = self.parse_object_fields(obj, path, get_module_name(class_name, None))
-        if fields or not isinstance(obj.additionalProperties, JsonSchemaObject):
-            data_model_type_class = self.data_model_type
-        else:
-            fields.append(
-                self.get_object_field(
-                    field_name=None,
-                    field=obj.additionalProperties,
-                    required=False,
-                    original_field_name=None,
-                    field_type=self.data_type(
-                        data_types=[
-                            self.parse_item(
-                                # TODO: Improve naming for nested ClassName
-                                name,
-                                obj.additionalProperties,
-                                [*path, 'additionalProperties'],
-                            )
-                        ],
-                        is_dict=True,
-                    ),
-                    alias=None,
-                )
-            )
-            data_model_type_class = self.data_model_root_type
-
-        self.set_additional_properties(class_name, obj)
-        data_model_type = data_model_type_class(
-            reference=reference,
-            fields=fields,
-            custom_base_class=obj.custom_base_path or self.base_class,
-            custom_template_dir=self.custom_template_dir,
-            extra_template_data=self.extra_template_data,
-            path=self.current_source_path,
-            description=obj.description if self.use_schema_description else None,
-            nullable=obj.type_has_null,
-            keyword_only=self.keyword_only,
-        )
-        self.results.append(data_model_type)
-        return self.data_type(reference=reference)
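When an object declares no named properties but a typed `additionalProperties`, the `else` branch above emits a root model over a dict instead of a regular model; roughly (pydantic v1 shown, since that is this module's default target):

```python
from typing import Dict

from pydantic import BaseModel


# from {'type': 'object', 'additionalProperties': {'type': 'integer'}}
class Counts(BaseModel):
    __root__: Dict[str, int]
```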
-
-    def parse_pattern_properties(
-        self,
-        name: str,
-        pattern_properties: Dict[str, JsonSchemaObject],
-        path: List[str],
-    ) -> DataType:
-        return self.data_type(
-            data_types=[
-                self.data_type(
-                    data_types=[
-                        self.parse_item(
-                            name,
-                            kv[1],
-                            get_special_path(f'patternProperties/{i}', path),
-                        )
-                    ],
-                    is_dict=True,
-                    dict_key=self.data_type_manager.get_data_type(
-                        Types.string,
-                        pattern=kv[0] if not self.field_constraints else None,
-                    ),
-                )
-                for i, kv in enumerate(pattern_properties.items())
-            ],
-        )
-
-    def parse_item(
-        self,
-        name: str,
-        item: JsonSchemaObject,
-        path: List[str],
-        singular_name: bool = False,
-        parent: Optional[JsonSchemaObject] = None,
-    ) -> DataType:
-        if self.use_title_as_name and item.title:
-            name = item.title
-            singular_name = False
-        if (
-            parent
-            and not item.enum
-            and item.has_constraint
-            and (parent.has_constraint or self.field_constraints)
-        ):
-            root_type_path = get_special_path('array', path)
-            return self.parse_root_type(
-                self.model_resolver.add(
-                    root_type_path,
-                    name,
-                    class_name=True,
-                    singular_name=singular_name,
-                ).name,
-                item,
-                root_type_path,
-            )
-        elif item.ref:
-            return self.get_ref_data_type(item.ref)
-        elif item.custom_type_path:
-            return self.data_type_manager.get_data_type_from_full_path(
-                item.custom_type_path, is_custom_type=True
-            )
-        elif item.is_array:
-            return self.parse_array_fields(
-                name, item, get_special_path('array', path)
-            ).data_type
-        elif (
-            item.discriminator
-            and parent
-            and parent.is_array
-            and (item.oneOf or item.anyOf)
-        ):
-            return self.parse_root_type(name, item, path)
-        elif item.anyOf:
-            return self.data_type(
-                data_types=self.parse_any_of(
-                    name, item, get_special_path('anyOf', path)
-                )
-            )
-        elif item.oneOf:
-            return self.data_type(
-                data_types=self.parse_one_of(
-                    name, item, get_special_path('oneOf', path)
-                )
-            )
-        elif item.allOf:
-            all_of_path = get_special_path('allOf', path)
-            all_of_path = [self.model_resolver.resolve_ref(all_of_path)]
-            return self.parse_all_of(
-                self.model_resolver.add(
-                    all_of_path, name, singular_name=singular_name, class_name=True
-                ).name,
-                item,
-                all_of_path,
-                ignore_duplicate_model=True,
-            )
-        elif item.is_object or item.patternProperties:
-            object_path = get_special_path('object', path)
-            if item.properties:
-                return self.parse_object(
-                    name, item, object_path, singular_name=singular_name
-                )
-            elif item.patternProperties:
-                # only a single-key dict is supported here.
-                return self.parse_pattern_properties(
-                    name, item.patternProperties, object_path
-                )
-            elif isinstance(item.additionalProperties, JsonSchemaObject):
-                return self.data_type(
-                    data_types=[
-                        self.parse_item(name, item.additionalProperties, object_path)
-                    ],
-                    is_dict=True,
-                )
-            return self.data_type_manager.get_data_type(
-                Types.object,
-            )
-        elif item.enum:
-            if self.should_parse_enum_as_literal(item):
-                return self.parse_enum_as_literal(item)
-            return self.parse_enum(
-                name, item, get_special_path('enum', path), singular_name=singular_name
-            )
-        return self.get_data_type(item)
-
-    def parse_list_item(
-        self,
-        name: str,
-        target_items: List[JsonSchemaObject],
-        path: List[str],
-        parent: JsonSchemaObject,
-        singular_name: bool = True,
-    ) -> List[DataType]:
-        return [
-            self.parse_item(
-                name,
-                item,
-                [*path, str(index)],
-                singular_name=singular_name,
-                parent=parent,
-            )
-            for index, item in enumerate(target_items)
-        ]
-
-    def parse_array_fields(
-        self,
-        name: str,
-        obj: JsonSchemaObject,
-        path: List[str],
-        singular_name: bool = True,
-    ) -> DataModelFieldBase:
-        if self.force_optional_for_required_fields:
-            required: bool = False
-            nullable: Optional[bool] = None
-        else:
-            required = not (
-                obj.has_default and self.apply_default_values_for_required_fields
-            )
-            if self.strict_nullable:
-                nullable = obj.nullable if obj.has_default or required else True
-            else:
-                required = not obj.nullable and required
-                nullable = None
-        if isinstance(obj.items, JsonSchemaObject):
-            items: List[JsonSchemaObject] = [obj.items]
-        elif isinstance(obj.items, list):
-            items = obj.items
-        else:
-            items = []
-
-        data_types: List[DataType] = [
-            self.data_type(
-                data_types=self.parse_list_item(
-                    name,
-                    items,
-                    path,
-                    obj,
-                    singular_name=singular_name,
-                ),
-                is_list=True,
-            )
-        ]
-        # TODO: decide special path word for a combined data model.
-        if obj.allOf:
-            data_types.append(
-                self.parse_all_of(name, obj, get_special_path('allOf', path))
-            )
-        elif obj.is_object:
-            data_types.append(
-                self.parse_object(name, obj, get_special_path('object', path))
-            )
-        if obj.enum:
-            data_types.append(
-                self.parse_enum(name, obj, get_special_path('enum', path))
-            )
-        return self.data_model_field_type(
-            data_type=self.data_type(data_types=data_types),
-            default=obj.default,
-            required=required,
-            constraints=obj.dict(),
-            nullable=nullable,
-            strip_default_none=self.strip_default_none,
-            extras=self.get_field_extras(obj),
-            use_annotated=self.use_annotated,
-            use_field_description=self.use_field_description,
-            original_name=None,
-            has_default=obj.has_default,
-        )
-
-    def parse_array(
-        self,
-        name: str,
-        obj: JsonSchemaObject,
-        path: List[str],
-        original_name: Optional[str] = None,
-    ) -> DataType:
-        if self.use_title_as_name and obj.title:
-            name = obj.title
-        reference = self.model_resolver.add(path, name, loaded=True, class_name=True)
-        field = self.parse_array_fields(original_name or name, obj, [*path, name])
-
-        if reference in [
-            d.reference for d in field.data_type.all_data_types if d.reference
-        ]:
-            # self-reference
-            field = self.data_model_field_type(
-                data_type=self.data_type(
-                    data_types=[
-                        self.data_type(
-                            data_types=field.data_type.data_types[1:], is_list=True
-                        ),
-                        *field.data_type.data_types[1:],
-                    ]
-                ),
-                default=field.default,
-                required=field.required,
-                constraints=field.constraints,
-                nullable=field.nullable,
-                strip_default_none=field.strip_default_none,
-                extras=field.extras,
-                use_annotated=self.use_annotated,
-                use_field_description=self.use_field_description,
-                original_name=None,
-                has_default=field.has_default,
-            )
-
-        data_model_root = self.data_model_root_type(
-            reference=reference,
-            fields=[field],
-            custom_base_class=obj.custom_base_path or self.base_class,
-            custom_template_dir=self.custom_template_dir,
-            extra_template_data=self.extra_template_data,
-            path=self.current_source_path,
-            description=obj.description if self.use_schema_description else None,
-            nullable=obj.type_has_null,
-        )
-        self.results.append(data_model_root)
-        return self.data_type(reference=reference)
-
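
The self-reference branch above fires when the array's own reference shows up among its item types, for example in a schema like this (shown as a Python dict, illustrative only):

    schema = {
        'definitions': {
            'Chain': {
                'type': 'array',
                'items': {'$ref': '#/definitions/Chain'},
            }
        }
    }

In that case the field is rebuilt so that, roughly speaking, the generated root model does not cycle directly through itself.
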
-    def parse_root_type(
-        self,
-        name: str,
-        obj: JsonSchemaObject,
-        path: List[str],
-    ) -> DataType:
-        reference: Optional[Reference] = None
-        if obj.ref:
-            data_type: DataType = self.get_ref_data_type(obj.ref)
-        elif obj.custom_type_path:
-            data_type = self.data_type_manager.get_data_type_from_full_path(
-                obj.custom_type_path, is_custom_type=True
-            )  # pragma: no cover
-        elif obj.is_array:
-            data_type = self.parse_array_fields(
-                name, obj, get_special_path('array', path)
-            ).data_type  # pragma: no cover
-        elif obj.anyOf or obj.oneOf:
-            reference = self.model_resolver.add(
-                path, name, loaded=True, class_name=True
-            )
-            if obj.anyOf:
-                data_types: List[DataType] = self.parse_any_of(
-                    name, obj, get_special_path('anyOf', path)
-                )
-            else:
-                data_types = self.parse_one_of(
-                    name, obj, get_special_path('oneOf', path)
-                )
-
-            if len(data_types) > 1:  # pragma: no cover
-                data_type = self.data_type(data_types=data_types)
-            elif not data_types:  # pragma: no cover
-                return EmptyDataType()
-            else:  # pragma: no cover
-                data_type = data_types[0]
-        elif obj.patternProperties:
-            data_type = self.parse_pattern_properties(name, obj.patternProperties, path)
-        elif obj.enum:
-            if self.should_parse_enum_as_literal(obj):
-                data_type = self.parse_enum_as_literal(obj)
-            else:  # pragma: no cover
-                data_type = self.parse_enum(name, obj, path)
-        elif obj.type:
-            data_type = self.get_data_type(obj)
-        else:
-            data_type = self.data_type_manager.get_data_type(
-                Types.any,
-            )
-        if self.force_optional_for_required_fields:
-            required: bool = False
-        else:
-            required = not obj.nullable and not (
-                obj.has_default and self.apply_default_values_for_required_fields
-            )
-        if self.use_title_as_name and obj.title:
-            name = obj.title
-        if not reference:
-            reference = self.model_resolver.add(
-                path, name, loaded=True, class_name=True
-            )
-        self.set_title(name, obj)
-        self.set_additional_properties(name, obj)
-        data_model_root_type = self.data_model_root_type(
-            reference=reference,
-            fields=[
-                self.data_model_field_type(
-                    data_type=data_type,
-                    default=obj.default,
-                    required=required,
-                    constraints=obj.dict() if self.field_constraints else {},
-                    nullable=obj.nullable if self.strict_nullable else None,
-                    strip_default_none=self.strip_default_none,
-                    extras=self.get_field_extras(obj),
-                    use_annotated=self.use_annotated,
-                    use_field_description=self.use_field_description,
-                    original_name=None,
-                    has_default=obj.has_default,
-                )
-            ],
-            custom_base_class=obj.custom_base_path or self.base_class,
-            custom_template_dir=self.custom_template_dir,
-            extra_template_data=self.extra_template_data,
-            path=self.current_source_path,
-            nullable=obj.type_has_null,
-        )
-        self.results.append(data_model_root_type)
-        return self.data_type(reference=reference)
-
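
parse_root_type handles schemas that carry no properties of their own: the whole subschema collapses into a single-field root model. A hedged sketch of input and approximate output (exact output varies with target and options):

    schema = {'title': 'UserId', 'type': 'string', 'maxLength': 36}
    # With the pydantic v1 target this becomes, roughly:
    #
    #     class UserId(BaseModel):
    #         __root__: constr(max_length=36)
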
-    def parse_enum_as_literal(self, obj: JsonSchemaObject) -> DataType:
-        return self.data_type(literals=[i for i in obj.enum if i is not None])
-
-    def parse_enum(
-        self,
-        name: str,
-        obj: JsonSchemaObject,
-        path: List[str],
-        singular_name: bool = False,
-        unique: bool = True,
-    ) -> DataType:
-        if not unique:  # pragma: no cover
-            warn(
-                f'{self.__class__.__name__}.parse_enum() ignores the `unique` argument. '
-                'An object name must be unique. '
-                'This argument will be removed in a future version.'
-            )
-        enum_fields: List[DataModelFieldBase] = []
-
-        if None in obj.enum and obj.type == 'string':
-            # A nullable enum is only valid in OpenAPI
-            nullable: bool = True
-            enum_items = [e for e in obj.enum if e is not None]
-        else:
-            enum_items = obj.enum
-            nullable = False
-
-        exclude_field_names: Set[str] = set()
-
-        for i, enum_part in enumerate(enum_items):
-            if obj.type == 'string' or isinstance(enum_part, str):
-                default = (
-                    f"'{enum_part.translate(escape_characters)}'"
-                    if isinstance(enum_part, str)
-                    else enum_part
-                )
-                if obj.x_enum_varnames:
-                    field_name = obj.x_enum_varnames[i]
-                else:
-                    field_name = str(enum_part)
-            else:
-                default = enum_part
-                if obj.x_enum_varnames:
-                    field_name = obj.x_enum_varnames[i]
-                else:
-                    prefix = (
-                        obj.type
-                        if isinstance(obj.type, str)
-                        else type(enum_part).__name__
-                    )
-                    field_name = f'{prefix}_{enum_part}'
-            field_name = self.model_resolver.get_valid_field_name(
-                field_name, excludes=exclude_field_names, model_type=ModelType.ENUM
-            )
-            exclude_field_names.add(field_name)
-            enum_fields.append(
-                self.data_model_field_type(
-                    name=field_name,
-                    default=default,
-                    data_type=self.data_type_manager.get_data_type(
-                        Types.any,
-                    ),
-                    required=True,
-                    strip_default_none=self.strip_default_none,
-                    has_default=obj.has_default,
-                    use_field_description=self.use_field_description,
-                    original_name=None,
-                )
-            )
-
-        def create_enum(reference_: Reference) -> DataType:
-            enum = Enum(
-                reference=reference_,
-                fields=enum_fields,
-                path=self.current_source_path,
-                description=obj.description if self.use_schema_description else None,
-                custom_template_dir=self.custom_template_dir,
-                type_=_get_type(obj.type, obj.format)
-                if self.use_subclass_enum and isinstance(obj.type, str)
-                else None,
-                default=obj.default if obj.has_default else UNDEFINED,
-            )
-            self.results.append(enum)
-            return self.data_type(reference=reference_)
-
-        if self.use_title_as_name and obj.title:
-            name = obj.title
-        reference = self.model_resolver.add(
-            path,
-            name,
-            class_name=True,
-            singular_name=singular_name,
-            singular_name_suffix='Enum',
-            loaded=True,
-        )
-
-        if not nullable:
-            return create_enum(reference)
-
-        enum_reference = self.model_resolver.add(
-            [*path, 'Enum'],
-            f'{reference.name}Enum',
-            class_name=True,
-            singular_name=singular_name,
-            singular_name_suffix='Enum',
-            loaded=True,
-        )
-
-        data_model_root_type = self.data_model_root_type(
-            reference=reference,
-            fields=[
-                self.data_model_field_type(
-                    data_type=create_enum(enum_reference),
-                    default=obj.default,
-                    required=False,
-                    nullable=True,
-                    strip_default_none=self.strip_default_none,
-                    extras=self.get_field_extras(obj),
-                    use_annotated=self.use_annotated,
-                    has_default=obj.has_default,
-                    use_field_description=self.use_field_description,
-                    original_name=None,
-                )
-            ],
-            custom_base_class=obj.custom_base_path or self.base_class,
-            custom_template_dir=self.custom_template_dir,
-            extra_template_data=self.extra_template_data,
-            path=self.current_source_path,
-            default=obj.default if obj.has_default else UNDEFINED,
-            nullable=obj.type_has_null,
-        )
-        self.results.append(data_model_root_type)
-        return self.data_type(reference=reference)
-
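
When `null` appears inside a string enum, the code above splits the result in two: a plain Enum holding the non-null members, wrapped in a root model whose single field is optional. A hedged sketch of input and approximate output:

    schema = {'type': 'string', 'enum': ['running', 'stopped', None]}
    # Roughly generates (pydantic v1 target):
    #
    #     class ModelEnum(Enum):
    #         running = 'running'
    #         stopped = 'stopped'
    #
    #     class Model(BaseModel):
    #         __root__: Optional[ModelEnum]
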
-    def _get_ref_body(self, resolved_ref: str) -> Dict[Any, Any]:
-        if is_url(resolved_ref):
-            return self._get_ref_body_from_url(resolved_ref)
-        return self._get_ref_body_from_remote(resolved_ref)
-
-    def _get_ref_body_from_url(self, ref: str) -> Dict[Any, Any]:
-        # URL Reference – $ref: 'http://path/to/your/resource' – uses the whole document located on a different server.
-        return self.remote_object_cache.get_or_put(
-            ref, default_factory=lambda key: load_yaml(self._get_text_from_url(key))
-        )
-
-    def _get_ref_body_from_remote(self, resolved_ref: str) -> Dict[Any, Any]:
-        # Remote Reference – $ref: 'document.json' – uses the whole document located on the same server and in
-        # the same location. TODO: handle edge cases
-        full_path = self.base_path / resolved_ref
-
-        return self.remote_object_cache.get_or_put(
-            str(full_path),
-            default_factory=lambda _: load_yaml_from_path(full_path, self.encoding),
-        )
-
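
Both remote lookups go through `remote_object_cache.get_or_put`, so each referenced document is fetched and YAML-parsed at most once. A minimal sketch of that cache pattern (`PutDict` is a hypothetical stand-in for the library's `DefaultPutDict`):

    from typing import Callable, Dict, TypeVar

    K = TypeVar('K')
    V = TypeVar('V')

    class PutDict(Dict[K, V]):  # hypothetical stand-in for DefaultPutDict
        def get_or_put(self, key: K, default_factory: Callable[[K], V]) -> V:
            if key not in self:
                self[key] = default_factory(key)  # computed once per key
            return self[key]

    cache: PutDict[str, str] = PutDict()
    cache.get_or_put('pet.yaml', default_factory=lambda k: f'parsed {k}')
    cache.get_or_put('pet.yaml', default_factory=lambda k: f'parsed {k}')  # cache hit
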
-    def resolve_ref(self, object_ref: str) -> Reference:
-        reference = self.model_resolver.add_ref(object_ref)
-        if reference.loaded:
-            return reference
-
-        # https://swagger.io/docs/specification/using-ref/
-        ref = self.model_resolver.resolve_ref(object_ref)
-        if get_ref_type(object_ref) == JSONReference.LOCAL:
-            # Local Reference – $ref: '#/definitions/myElement'
-            self.reserved_refs[tuple(self.model_resolver.current_root)].add(ref)  # type: ignore
-            return reference
-        elif self.model_resolver.is_after_load(ref):
-            self.reserved_refs[tuple(ref.split('#')[0].split('/'))].add(ref)  # type: ignore
-            return reference
-
-        if is_url(ref):
-            relative_path, object_path = ref.split('#')
-            relative_paths = [relative_path]
-            base_path = None
-        else:
-            if self.model_resolver.is_external_root_ref(ref):
-                relative_path, object_path = ref[:-1], ''
-            else:
-                relative_path, object_path = ref.split('#')
-            relative_paths = relative_path.split('/')
-            base_path = Path(*relative_paths).parent
-        with self.model_resolver.current_base_path_context(
-            base_path
-        ), self.model_resolver.base_url_context(relative_path):
-            self._parse_file(
-                self._get_ref_body(relative_path),
-                self.model_resolver.add_ref(ref, resolved=True).name,
-                relative_paths,
-                object_path.split('/') if object_path else None,
-            )
-        reference.loaded = True
-        return reference
-
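
resolve_ref distinguishes three `$ref` shapes: local pointers, files next to the current document, and full URLs. A rough classifier mirroring those branches (illustrative only, not the library's API):

    from urllib.parse import urlparse

    def classify_ref(ref: str) -> str:
        if ref.startswith('#/'):
            return 'local'      # resolved within the current document
        if urlparse(ref).scheme in ('http', 'https'):
            return 'url'        # fetched from a remote server
        return 'remote'         # a file relative to the current document

    assert classify_ref('#/definitions/Pet') == 'local'
    assert classify_ref('pet.yaml#/Pet') == 'remote'
    assert classify_ref('https://example.com/pet.yaml#/Pet') == 'url'
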
-    def parse_ref(self, obj: JsonSchemaObject, path: List[str]) -> None:
-        if obj.ref:
-            self.resolve_ref(obj.ref)
-        if obj.items:
-            if isinstance(obj.items, JsonSchemaObject):
-                self.parse_ref(obj.items, path)
-            else:
-                if isinstance(obj.items, list):
-                    for item in obj.items:
-                        self.parse_ref(item, path)
-        if isinstance(obj.additionalProperties, JsonSchemaObject):
-            self.parse_ref(obj.additionalProperties, path)
-        if obj.patternProperties:
-            for value in obj.patternProperties.values():
-                self.parse_ref(value, path)
-        for item in obj.anyOf:
-            self.parse_ref(item, path)
-        for item in obj.allOf:
-            self.parse_ref(item, path)
-        for item in obj.oneOf:
-            self.parse_ref(item, path)
-        if obj.properties:
-            for property_value in obj.properties.values():
-                if isinstance(property_value, JsonSchemaObject):
-                    self.parse_ref(property_value, path)
-
-    def parse_id(self, obj: JsonSchemaObject, path: List[str]) -> None:
-        if obj.id:
-            self.model_resolver.add_id(obj.id, path)
-        if obj.items:
-            if isinstance(obj.items, JsonSchemaObject):
-                self.parse_id(obj.items, path)
-            else:
-                if isinstance(obj.items, list):
-                    for item in obj.items:
-                        self.parse_id(item, path)
-        if isinstance(obj.additionalProperties, JsonSchemaObject):
-            self.parse_id(obj.additionalProperties, path)
-        if obj.patternProperties:
-            for value in obj.patternProperties.values():
-                self.parse_id(value, path)
-        for item in obj.anyOf:
-            self.parse_id(item, path)
-        for item in obj.allOf:
-            self.parse_id(item, path)
-        if obj.properties:
-            for property_value in obj.properties.values():
-                if isinstance(property_value, JsonSchemaObject):
-                    self.parse_id(property_value, path)
-
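
parse_ref and parse_id are the same depth-first walk over every place a subschema can hide. A condensed, generic version of that traversal (a sketch over plain dicts, not the library's JsonSchemaObject):

    from typing import Any, Callable, Dict

    def walk(schema: Dict[str, Any], visit: Callable[[Dict[str, Any]], None]) -> None:
        visit(schema)
        items = schema.get('items')
        if isinstance(items, dict):
            walk(items, visit)
        elif isinstance(items, list):
            for item in items:
                walk(item, visit)
        if isinstance(schema.get('additionalProperties'), dict):
            walk(schema['additionalProperties'], visit)
        for keyword in ('anyOf', 'allOf', 'oneOf'):
            for sub in schema.get(keyword) or []:
                walk(sub, visit)
        for prop in (schema.get('properties') or {}).values():
            if isinstance(prop, dict):
                walk(prop, visit)
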
-    @contextmanager
-    def root_id_context(self, root_raw: Dict[str, Any]) -> Generator[None, None, None]:
-        root_id: Optional[str] = root_raw.get('$id')
-        previous_root_id: Optional[str] = self.root_id
-        self.root_id = root_id if root_id else None
-        try:
-            yield
-        finally:
-            # Restore the previous root id even if parsing raises.
-            self.root_id = previous_root_id
-
-    def parse_raw_obj(
-        self,
-        name: str,
-        raw: Dict[str, Any],
-        path: List[str],
-    ) -> None:
-        self.parse_obj(name, self.SCHEMA_OBJECT_TYPE.parse_obj(raw), path)
-
-    def parse_obj(
-        self,
-        name: str,
-        obj: JsonSchemaObject,
-        path: List[str],
-    ) -> None:
-        if obj.is_array:
-            self.parse_array(name, obj, path)
-        elif obj.allOf:
-            self.parse_all_of(name, obj, path)
-        elif obj.oneOf or obj.anyOf:
-            data_type = self.parse_root_type(name, obj, path)
-            if isinstance(data_type, EmptyDataType) and obj.properties:
-                self.parse_object(name, obj, path)  # pragma: no cover
-        elif obj.properties:
-            self.parse_object(name, obj, path)
-        elif obj.patternProperties:
-            self.parse_root_type(name, obj, path)
-        elif obj.type == 'object':
-            self.parse_object(name, obj, path)
-        elif obj.enum and not self.should_parse_enum_as_literal(obj):
-            self.parse_enum(name, obj, path)
-        else:
-            self.parse_root_type(name, obj, path)
-        self.parse_ref(obj, path)
-
-    def _get_context_source_path_parts(self) -> Iterator[Tuple[Source, List[str]]]:
-        if isinstance(self.source, list) or (
-            isinstance(self.source, Path) and self.source.is_dir()
-        ):
-            self.current_source_path = Path()
-            self.model_resolver.after_load_files = {
-                self.base_path.joinpath(s.path).resolve().as_posix()
-                for s in self.iter_source
-            }
-
-        for source in self.iter_source:
-            if isinstance(self.source, ParseResult):
-                path_parts = self.get_url_path_parts(self.source)
-            else:
-                path_parts = list(source.path.parts)
-            if self.current_source_path is not None:
-                self.current_source_path = source.path
-            with self.model_resolver.current_base_path_context(
-                source.path.parent
-            ), self.model_resolver.current_root_context(path_parts):
-                yield source, path_parts
-
-    def parse_raw(self) -> None:
-        for source, path_parts in self._get_context_source_path_parts():
-            self.raw_obj = load_yaml(source.text)
-            if self.raw_obj is None:  # pragma: no cover
-                warn(f'{source.path} is empty. Skipping this file')
-                continue
-            if self.custom_class_name_generator:
-                obj_name = self.raw_obj.get('title', 'Model')
-            else:
-                if self.class_name:
-                    obj_name = self.class_name
-                else:
-                    # backward compatible
-                    obj_name = self.raw_obj.get('title', 'Model')
-                    if not self.model_resolver.validate_name(obj_name):
-                        obj_name = title_to_class_name(obj_name)
-                if not self.model_resolver.validate_name(obj_name):
-                    raise InvalidClassNameError(obj_name)
-            self._parse_file(self.raw_obj, obj_name, path_parts)
-
-        self._resolve_unparsed_json_pointer()
-
-    def _resolve_unparsed_json_pointer(self) -> None:
-        model_count: int = len(self.results)
-        for source in self.iter_source:
-            path_parts = list(source.path.parts)
-            reserved_refs = self.reserved_refs.get(tuple(path_parts))  # type: ignore
-            if not reserved_refs:
-                continue
-            if self.current_source_path is not None:
-                self.current_source_path = source.path
-
-            with self.model_resolver.current_base_path_context(
-                source.path.parent
-            ), self.model_resolver.current_root_context(path_parts):
-                for reserved_ref in sorted(reserved_refs):
-                    if self.model_resolver.add_ref(reserved_ref, resolved=True).loaded:
-                        continue
-                    # for root model
-                    self.raw_obj = load_yaml(source.text)
-                    self.parse_json_pointer(self.raw_obj, reserved_ref, path_parts)
-
-        if model_count != len(self.results):
-            # New models have been generated; try to resolve JSON pointers again
-            # (recurse until a fixed point is reached).
-            self._resolve_unparsed_json_pointer()
-
-    def parse_json_pointer(
-        self, raw: Dict[str, Any], ref: str, path_parts: List[str]
-    ) -> None:
-        path = ref.split('#', 1)[-1]
-        if path[0] == '/':  # pragma: no cover
-            path = path[1:]
-        object_paths = path.split('/')
-        models = get_model_by_path(raw, object_paths)
-        model_name = object_paths[-1]
-
-        self.parse_raw_obj(
-            model_name, models, [*path_parts, f'#/{object_paths[0]}', *object_paths[1:]]
-        )
-
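
A JSON pointer is just a slash-separated path into the raw document, so the lookup boils down to this (with `get_by_path` as a hypothetical, simplified stand-in for `get_model_by_path`):

    from typing import Any, Dict, List

    def get_by_path(obj: Dict[str, Any], parts: List[str]) -> Any:
        for part in parts:
            obj = obj[part]
        return obj

    raw = {'definitions': {'Pet': {'type': 'object'}}}
    pointer = '#/definitions/Pet'
    parts = pointer.split('#', 1)[-1].lstrip('/').split('/')
    assert get_by_path(raw, parts) == {'type': 'object'}
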
-    def _parse_file(
-        self,
-        raw: Dict[str, Any],
-        obj_name: str,
-        path_parts: List[str],
-        object_paths: Optional[List[str]] = None,
-    ) -> None:
-        object_paths = [o for o in object_paths or [] if o]
-        if object_paths:
-            path = [*path_parts, f'#/{object_paths[0]}', *object_paths[1:]]
-        else:
-            path = path_parts
-        with self.model_resolver.current_root_context(path_parts):
-            obj_name = self.model_resolver.add(
-                path, obj_name, unique=False, class_name=True
-            ).name
-            with self.root_id_context(raw):
-                # Some JSON Schema documents include a `self` attribute that holds version details
-                raw.pop('self', None)
-                # parse $id before parsing $ref
-                root_obj = self.SCHEMA_OBJECT_TYPE.parse_obj(raw)
-                self.parse_id(root_obj, path_parts)
-                definitions: Optional[Dict[Any, Any]] = None
-                for schema_path, split_schema_path in self.schema_paths:
-                    try:
-                        definitions = get_model_by_path(raw, split_schema_path)
-                        if definitions:
-                            break
-                    except KeyError:
-                        continue
-                if definitions is None:
-                    definitions = {}
-
-                for key, model in definitions.items():
-                    obj = self.SCHEMA_OBJECT_TYPE.parse_obj(model)
-                    self.parse_id(obj, [*path_parts, schema_path, key])
-
-                if object_paths:
-                    models = get_model_by_path(raw, object_paths)
-                    model_name = object_paths[-1]
-                    self.parse_obj(
-                        model_name, self.SCHEMA_OBJECT_TYPE.parse_obj(models), path
-                    )
-                else:
-                    self.parse_obj(obj_name, root_obj, path_parts or ['#'])
-                for key, model in definitions.items():
-                    path = [*path_parts, schema_path, key]
-                    reference = self.model_resolver.get(path)
-                    if not reference or not reference.loaded:
-                        self.parse_raw_obj(key, model, path)
-
-                key = tuple(path_parts)
-                reserved_refs = set(self.reserved_refs.get(key) or [])
-                while reserved_refs:
-                    for reserved_path in sorted(reserved_refs):
-                        reference = self.model_resolver.get(reserved_path)
-                        if not reference or reference.loaded:
-                            continue
-                        object_paths = reserved_path.split('#/', 1)[-1].split('/')
-                        path = reserved_path.split('/')
-                        models = get_model_by_path(raw, object_paths)
-                        model_name = object_paths[-1]
-                        self.parse_obj(
-                            model_name, self.SCHEMA_OBJECT_TYPE.parse_obj(models), path
-                        )
-                    previous_reserved_refs = reserved_refs
-                    reserved_refs = set(self.reserved_refs.get(key) or [])
-                    if previous_reserved_refs == reserved_refs:
-                        break
diff -pruN 0.26.4-3/datamodel_code_generator/parser/openapi.py 0.45.0-1/datamodel_code_generator/parser/openapi.py
--- 0.26.4-3/datamodel_code_generator/parser/openapi.py	2024-12-15 17:25:57.706037000 +0000
+++ 0.45.0-1/datamodel_code_generator/parser/openapi.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,648 +0,0 @@
-from __future__ import annotations
-
-import re
-from collections import defaultdict
-from enum import Enum
-from pathlib import Path
-from typing import (
-    Any,
-    Callable,
-    ClassVar,
-    DefaultDict,
-    Dict,
-    Iterable,
-    List,
-    Mapping,
-    Optional,
-    Pattern,
-    Sequence,
-    Set,
-    Tuple,
-    Type,
-    TypeVar,
-    Union,
-)
-from urllib.parse import ParseResult
-from warnings import warn
-
-from pydantic import Field
-
-from datamodel_code_generator import (
-    DefaultPutDict,
-    Error,
-    LiteralType,
-    OpenAPIScope,
-    PythonVersion,
-    load_yaml,
-    snooper_to_methods,
-)
-from datamodel_code_generator.format import DatetimeClassType
-from datamodel_code_generator.model import DataModel, DataModelFieldBase
-from datamodel_code_generator.model import pydantic as pydantic_model
-from datamodel_code_generator.parser.base import get_special_path
-from datamodel_code_generator.parser.jsonschema import (
-    JsonSchemaObject,
-    JsonSchemaParser,
-    get_model_by_path,
-)
-from datamodel_code_generator.reference import snake_to_upper_camel
-from datamodel_code_generator.types import (
-    DataType,
-    DataTypeManager,
-    EmptyDataType,
-    StrictTypes,
-)
-from datamodel_code_generator.util import BaseModel
-
-RE_APPLICATION_JSON_PATTERN: Pattern[str] = re.compile(r'^application/.*json$')
-
-OPERATION_NAMES: List[str] = [
-    'get',
-    'put',
-    'post',
-    'delete',
-    'patch',
-    'head',
-    'options',
-    'trace',
-]
-
-
-class ParameterLocation(Enum):
-    query = 'query'
-    header = 'header'
-    path = 'path'
-    cookie = 'cookie'
-
-
-BaseModelT = TypeVar('BaseModelT', bound=BaseModel)
-
-
-class ReferenceObject(BaseModel):
-    ref: str = Field(..., alias='$ref')
-
-
-class ExampleObject(BaseModel):
-    summary: Optional[str] = None
-    description: Optional[str] = None
-    value: Any = None
-    externalValue: Optional[str] = None
-
-
-class MediaObject(BaseModel):
-    schema_: Union[ReferenceObject, JsonSchemaObject, None] = Field(
-        None, alias='schema'
-    )
-    example: Any = None
-    examples: Union[str, ReferenceObject, ExampleObject, None] = None
-
-
-class ParameterObject(BaseModel):
-    name: Optional[str] = None
-    in_: Optional[ParameterLocation] = Field(None, alias='in')
-    description: Optional[str] = None
-    required: bool = False
-    deprecated: bool = False
-    schema_: Optional[JsonSchemaObject] = Field(None, alias='schema')
-    example: Any = None
-    examples: Union[str, ReferenceObject, ExampleObject, None] = None
-    content: Dict[str, MediaObject] = {}
-
-
-class HeaderObject(BaseModel):
-    description: Optional[str] = None
-    required: bool = False
-    deprecated: bool = False
-    schema_: Optional[JsonSchemaObject] = Field(None, alias='schema')
-    example: Any = None
-    examples: Union[str, ReferenceObject, ExampleObject, None] = None
-    content: Dict[str, MediaObject] = {}
-
-
-class RequestBodyObject(BaseModel):
-    description: Optional[str] = None
-    content: Dict[str, MediaObject] = {}
-    required: bool = False
-
-
-class ResponseObject(BaseModel):
-    description: Optional[str] = None
-    headers: Dict[str, ParameterObject] = {}
-    content: Dict[Union[str, int], MediaObject] = {}
-
-
-class Operation(BaseModel):
-    tags: List[str] = []
-    summary: Optional[str] = None
-    description: Optional[str] = None
-    operationId: Optional[str] = None
-    parameters: List[Union[ReferenceObject, ParameterObject]] = []
-    requestBody: Union[ReferenceObject, RequestBodyObject, None] = None
-    responses: Dict[Union[str, int], Union[ReferenceObject, ResponseObject]] = {}
-    deprecated: bool = False
-
-
-class ComponentsObject(BaseModel):
-    schemas: Dict[str, Union[ReferenceObject, JsonSchemaObject]] = {}
-    responses: Dict[str, Union[ReferenceObject, ResponseObject]] = {}
-    examples: Dict[str, Union[ReferenceObject, ExampleObject]] = {}
-    requestBodies: Dict[str, Union[ReferenceObject, RequestBodyObject]] = {}
-    headers: Dict[str, Union[ReferenceObject, HeaderObject]] = {}
-
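
These pydantic models give typed access to the relevant parts of an OpenAPI document. A small sketch of using them directly (assuming the 0.26.x module layout, since this file is removed in 0.45):

    from datamodel_code_generator.parser.openapi import ResponseObject

    resp = ResponseObject.parse_obj({
        'description': 'OK',
        'content': {'application/json': {'schema': {'type': 'string'}}},
    })
    media = resp.content['application/json']
    print(media.schema_.type)  # -> 'string' (the `schema` key maps to `schema_`)
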
-
-@snooper_to_methods(max_variable_length=None)
-class OpenAPIParser(JsonSchemaParser):
-    SCHEMA_PATHS: ClassVar[List[str]] = ['#/components/schemas']
-
-    def __init__(
-        self,
-        source: Union[str, Path, List[Path], ParseResult],
-        *,
-        data_model_type: Type[DataModel] = pydantic_model.BaseModel,
-        data_model_root_type: Type[DataModel] = pydantic_model.CustomRootType,
-        data_type_manager_type: Type[DataTypeManager] = pydantic_model.DataTypeManager,
-        data_model_field_type: Type[DataModelFieldBase] = pydantic_model.DataModelField,
-        base_class: Optional[str] = None,
-        additional_imports: Optional[List[str]] = None,
-        custom_template_dir: Optional[Path] = None,
-        extra_template_data: Optional[DefaultDict[str, Dict[str, Any]]] = None,
-        target_python_version: PythonVersion = PythonVersion.PY_38,
-        dump_resolve_reference_action: Optional[Callable[[Iterable[str]], str]] = None,
-        validation: bool = False,
-        field_constraints: bool = False,
-        snake_case_field: bool = False,
-        strip_default_none: bool = False,
-        aliases: Optional[Mapping[str, str]] = None,
-        allow_population_by_field_name: bool = False,
-        allow_extra_fields: bool = False,
-        apply_default_values_for_required_fields: bool = False,
-        force_optional_for_required_fields: bool = False,
-        class_name: Optional[str] = None,
-        use_standard_collections: bool = False,
-        base_path: Optional[Path] = None,
-        use_schema_description: bool = False,
-        use_field_description: bool = False,
-        use_default_kwarg: bool = False,
-        reuse_model: bool = False,
-        encoding: str = 'utf-8',
-        enum_field_as_literal: Optional[LiteralType] = None,
-        use_one_literal_as_default: bool = False,
-        set_default_enum_member: bool = False,
-        use_subclass_enum: bool = False,
-        strict_nullable: bool = False,
-        use_generic_container_types: bool = False,
-        enable_faux_immutability: bool = False,
-        remote_text_cache: Optional[DefaultPutDict[str, str]] = None,
-        disable_appending_item_suffix: bool = False,
-        strict_types: Optional[Sequence[StrictTypes]] = None,
-        empty_enum_field_name: Optional[str] = None,
-        custom_class_name_generator: Optional[Callable[[str], str]] = None,
-        field_extra_keys: Optional[Set[str]] = None,
-        field_include_all_keys: bool = False,
-        field_extra_keys_without_x_prefix: Optional[Set[str]] = None,
-        openapi_scopes: Optional[List[OpenAPIScope]] = None,
-        wrap_string_literal: Optional[bool] = False,
-        use_title_as_name: bool = False,
-        use_operation_id_as_name: bool = False,
-        use_unique_items_as_set: bool = False,
-        http_headers: Optional[Sequence[Tuple[str, str]]] = None,
-        http_ignore_tls: bool = False,
-        use_annotated: bool = False,
-        use_non_positive_negative_number_constrained_types: bool = False,
-        original_field_name_delimiter: Optional[str] = None,
-        use_double_quotes: bool = False,
-        use_union_operator: bool = False,
-        allow_responses_without_content: bool = False,
-        collapse_root_models: bool = False,
-        special_field_name_prefix: Optional[str] = None,
-        remove_special_field_name_prefix: bool = False,
-        capitalise_enum_members: bool = False,
-        keep_model_order: bool = False,
-        known_third_party: Optional[List[str]] = None,
-        custom_formatters: Optional[List[str]] = None,
-        custom_formatters_kwargs: Optional[Dict[str, Any]] = None,
-        use_pendulum: bool = False,
-        http_query_parameters: Optional[Sequence[Tuple[str, str]]] = None,
-        treat_dots_as_module: bool = False,
-        use_exact_imports: bool = False,
-        default_field_extras: Optional[Dict[str, Any]] = None,
-        target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
-        keyword_only: bool = False,
-        no_alias: bool = False,
-    ):
-        super().__init__(
-            source=source,
-            data_model_type=data_model_type,
-            data_model_root_type=data_model_root_type,
-            data_type_manager_type=data_type_manager_type,
-            data_model_field_type=data_model_field_type,
-            base_class=base_class,
-            additional_imports=additional_imports,
-            custom_template_dir=custom_template_dir,
-            extra_template_data=extra_template_data,
-            target_python_version=target_python_version,
-            dump_resolve_reference_action=dump_resolve_reference_action,
-            validation=validation,
-            field_constraints=field_constraints,
-            snake_case_field=snake_case_field,
-            strip_default_none=strip_default_none,
-            aliases=aliases,
-            allow_population_by_field_name=allow_population_by_field_name,
-            allow_extra_fields=allow_extra_fields,
-            apply_default_values_for_required_fields=apply_default_values_for_required_fields,
-            force_optional_for_required_fields=force_optional_for_required_fields,
-            class_name=class_name,
-            use_standard_collections=use_standard_collections,
-            base_path=base_path,
-            use_schema_description=use_schema_description,
-            use_field_description=use_field_description,
-            use_default_kwarg=use_default_kwarg,
-            reuse_model=reuse_model,
-            encoding=encoding,
-            enum_field_as_literal=enum_field_as_literal,
-            use_one_literal_as_default=use_one_literal_as_default,
-            set_default_enum_member=set_default_enum_member,
-            use_subclass_enum=use_subclass_enum,
-            strict_nullable=strict_nullable,
-            use_generic_container_types=use_generic_container_types,
-            enable_faux_immutability=enable_faux_immutability,
-            remote_text_cache=remote_text_cache,
-            disable_appending_item_suffix=disable_appending_item_suffix,
-            strict_types=strict_types,
-            empty_enum_field_name=empty_enum_field_name,
-            custom_class_name_generator=custom_class_name_generator,
-            field_extra_keys=field_extra_keys,
-            field_include_all_keys=field_include_all_keys,
-            field_extra_keys_without_x_prefix=field_extra_keys_without_x_prefix,
-            wrap_string_literal=wrap_string_literal,
-            use_title_as_name=use_title_as_name,
-            use_operation_id_as_name=use_operation_id_as_name,
-            use_unique_items_as_set=use_unique_items_as_set,
-            http_headers=http_headers,
-            http_ignore_tls=http_ignore_tls,
-            use_annotated=use_annotated,
-            use_non_positive_negative_number_constrained_types=use_non_positive_negative_number_constrained_types,
-            original_field_name_delimiter=original_field_name_delimiter,
-            use_double_quotes=use_double_quotes,
-            use_union_operator=use_union_operator,
-            allow_responses_without_content=allow_responses_without_content,
-            collapse_root_models=collapse_root_models,
-            special_field_name_prefix=special_field_name_prefix,
-            remove_special_field_name_prefix=remove_special_field_name_prefix,
-            capitalise_enum_members=capitalise_enum_members,
-            keep_model_order=keep_model_order,
-            known_third_party=known_third_party,
-            custom_formatters=custom_formatters,
-            custom_formatters_kwargs=custom_formatters_kwargs,
-            use_pendulum=use_pendulum,
-            http_query_parameters=http_query_parameters,
-            treat_dots_as_module=treat_dots_as_module,
-            use_exact_imports=use_exact_imports,
-            default_field_extras=default_field_extras,
-            target_datetime_class=target_datetime_class,
-            keyword_only=keyword_only,
-            no_alias=no_alias,
-        )
-        self.open_api_scopes: List[OpenAPIScope] = openapi_scopes or [
-            OpenAPIScope.Schemas
-        ]
-
-    def get_ref_model(self, ref: str) -> Dict[str, Any]:
-        ref_file, ref_path = self.model_resolver.resolve_ref(ref).split('#', 1)
-        if ref_file:
-            ref_body = self._get_ref_body(ref_file)
-        else:  # pragma: no cover
-            ref_body = self.raw_obj
-        return get_model_by_path(ref_body, ref_path.split('/')[1:])
-
-    def get_data_type(self, obj: JsonSchemaObject) -> DataType:
-        # OpenAPI 3.0 allows neither `null` in the `type` field nor a list of types
-        # https://swagger.io/docs/specification/data-models/data-types/#null
-        # OpenAPI 3.1 does allow `null` in the `type` field, which is equivalent
-        # to a `nullable` flag on the property itself
-        if obj.nullable and self.strict_nullable and isinstance(obj.type, str):
-            obj.type = [obj.type, 'null']
-
-        return super().get_data_type(obj)
-
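
In other words, under strict-nullable the hook rewrites the 3.0 spelling into the 3.1 spelling before delegating to the JSON Schema logic:

    # OpenAPI 3.0 spelling of a nullable string:
    openapi_30 = {'type': 'string', 'nullable': True}
    # OpenAPI 3.1 / JSON Schema spelling it is normalised towards:
    openapi_31 = {'type': ['string', 'null']}
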
-    def resolve_object(
-        self, obj: Union[ReferenceObject, BaseModelT], object_type: Type[BaseModelT]
-    ) -> BaseModelT:
-        if isinstance(obj, ReferenceObject):
-            ref_obj = self.get_ref_model(obj.ref)
-            return object_type.parse_obj(ref_obj)
-        return obj
-
-    def parse_schema(
-        self,
-        name: str,
-        obj: JsonSchemaObject,
-        path: List[str],
-    ) -> DataType:
-        if obj.is_array:
-            data_type = self.parse_array(name, obj, [*path, name])
-        elif obj.allOf:  # pragma: no cover
-            data_type = self.parse_all_of(name, obj, path)
-        elif obj.oneOf or obj.anyOf:  # pragma: no cover
-            data_type = self.parse_root_type(name, obj, path)
-            if isinstance(data_type, EmptyDataType) and obj.properties:
-                self.parse_object(name, obj, path)
-        elif obj.is_object:
-            data_type = self.parse_object(name, obj, path)
-        elif obj.enum:  # pragma: no cover
-            data_type = self.parse_enum(name, obj, path)
-        elif obj.ref:  # pragma: no cover
-            data_type = self.get_ref_data_type(obj.ref)
-        else:
-            data_type = self.get_data_type(obj)
-        self.parse_ref(obj, path)
-        return data_type
-
-    def parse_request_body(
-        self,
-        name: str,
-        request_body: RequestBodyObject,
-        path: List[str],
-    ) -> None:
-        for (
-            media_type,
-            media_obj,
-        ) in request_body.content.items():  # type: str, MediaObject
-            if isinstance(media_obj.schema_, JsonSchemaObject):
-                self.parse_schema(name, media_obj.schema_, [*path, media_type])
-
-    def parse_responses(
-        self,
-        name: str,
-        responses: Dict[Union[str, int], Union[ReferenceObject, ResponseObject]],
-        path: List[str],
-    ) -> Dict[Union[str, int], Dict[str, DataType]]:
-        data_types: DefaultDict[Union[str, int], Dict[str, DataType]] = defaultdict(
-            dict
-        )
-        for status_code, detail in responses.items():
-            if isinstance(detail, ReferenceObject):
-                if not detail.ref:  # pragma: no cover
-                    continue
-                ref_model = self.get_ref_model(detail.ref)
-                content = {
-                    k: MediaObject.parse_obj(v)
-                    for k, v in ref_model.get('content', {}).items()
-                }
-            else:
-                content = detail.content
-
-            if self.allow_responses_without_content and not content:
-                data_types[status_code]['application/json'] = DataType(type='None')
-
-            for content_type, obj in content.items():
-                object_schema = obj.schema_
-                if not object_schema:  # pragma: no cover
-                    continue
-                if isinstance(object_schema, JsonSchemaObject):
-                    data_types[status_code][content_type] = self.parse_schema(
-                        name, object_schema, [*path, str(status_code), content_type]
-                    )
-                else:
-                    data_types[status_code][content_type] = self.get_ref_data_type(
-                        object_schema.ref
-                    )
-
-        return data_types
-
-    @classmethod
-    def parse_tags(
-        cls,
-        name: str,
-        tags: List[str],
-        path: List[str],
-    ) -> List[str]:
-        return tags
-
-    @classmethod
-    def _get_model_name(cls, path_name: str, method: str, suffix: str) -> str:
-        camel_path_name = snake_to_upper_camel(path_name.replace('/', '_'))
-        return f'{camel_path_name}{method.capitalize()}{suffix}'
-
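
Model names are derived from the path, the HTTP method, and a suffix. A rough restatement of that transformation (hypothetical helper; `snake_to_upper_camel` may differ in edge cases such as path parameters):

    def get_model_name(path_name: str, method: str, suffix: str) -> str:
        camel = ''.join(
            part[:1].upper() + part[1:]
            for part in path_name.replace('/', '_').split('_')
            if part
        )
        return f'{camel}{method.capitalize()}{suffix}'

    assert get_model_name('store/order', 'get', 'Response') == 'StoreOrderGetResponse'
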
-    def parse_all_parameters(
-        self,
-        name: str,
-        parameters: List[Union[ReferenceObject, ParameterObject]],
-        path: List[str],
-    ) -> None:
-        fields: List[DataModelFieldBase] = []
-        exclude_field_names: Set[str] = set()
-        reference = self.model_resolver.add(path, name, class_name=True, unique=True)
-        for parameter in parameters:
-            parameter = self.resolve_object(parameter, ParameterObject)
-            parameter_name = parameter.name
-            if not parameter_name or parameter.in_ != ParameterLocation.query:
-                continue
-            field_name, alias = self.model_resolver.get_valid_field_name_and_alias(
-                field_name=parameter_name, excludes=exclude_field_names
-            )
-            if parameter.schema_:
-                fields.append(
-                    self.get_object_field(
-                        field_name=field_name,
-                        field=parameter.schema_,
-                        field_type=self.parse_item(
-                            field_name, parameter.schema_, [*path, name, parameter_name]
-                        ),
-                        original_field_name=parameter_name,
-                        required=parameter.required,
-                        alias=alias,
-                    )
-                )
-            else:
-                data_types: List[DataType] = []
-                object_schema: Optional[JsonSchemaObject] = None
-                for (
-                    media_type,
-                    media_obj,
-                ) in parameter.content.items():
-                    if not media_obj.schema_:
-                        continue
-                    object_schema = self.resolve_object(
-                        media_obj.schema_, JsonSchemaObject
-                    )
-                    data_types.append(
-                        self.parse_item(
-                            field_name,
-                            object_schema,
-                            [*path, name, parameter_name, media_type],
-                        )
-                    )
-
-                if not data_types:
-                    continue
-                if len(data_types) == 1:
-                    data_type = data_types[0]
-                else:
-                    data_type = self.data_type(data_types=data_types)
-                    # multiple data types are parsed as a field without constraints
-                    object_schema = None
-                fields.append(
-                    self.data_model_field_type(
-                        name=field_name,
-                        default=object_schema.default if object_schema else None,
-                        data_type=data_type,
-                        required=parameter.required,
-                        alias=alias,
-                        constraints=object_schema.dict()
-                        if object_schema and self.is_constraints_field(object_schema)
-                        else None,
-                        nullable=object_schema.nullable
-                        if object_schema
-                        and self.strict_nullable
-                        and (object_schema.has_default or parameter.required)
-                        else None,
-                        strip_default_none=self.strip_default_none,
-                        extras=self.get_field_extras(object_schema)
-                        if object_schema
-                        else {},
-                        use_annotated=self.use_annotated,
-                        use_field_description=self.use_field_description,
-                        use_default_kwarg=self.use_default_kwarg,
-                        original_name=parameter_name,
-                        has_default=object_schema.has_default
-                        if object_schema
-                        else False,
-                        type_has_null=object_schema.type_has_null
-                        if object_schema
-                        else None,
-                    )
-                )
-
-        if OpenAPIScope.Parameters in self.open_api_scopes and fields:
-            self.results.append(
-                self.data_model_type(
-                    fields=fields,
-                    reference=reference,
-                    custom_base_class=self.base_class,
-                    custom_template_dir=self.custom_template_dir,
-                    keyword_only=self.keyword_only,
-                )
-            )
-
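
Only `in: query` parameters are collected, and they end up in a single `...ParametersQuery` model when the Parameters scope is enabled. For example (hedged; the class name assumes a GET on /pets and exact output depends on options):

    # Given an operation with this parameter list (shown as Python data):
    parameters = [
        {'name': 'limit', 'in': 'query', 'required': False,
         'schema': {'type': 'integer'}},
    ]
    # the Parameters scope emits, roughly:
    #
    #     class PetsGetParametersQuery(BaseModel):
    #         limit: Optional[int] = None
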
-    def parse_operation(
-        self,
-        raw_operation: Dict[str, Any],
-        path: List[str],
-    ) -> None:
-        operation = Operation.parse_obj(raw_operation)
-        path_name, method = path[-2:]
-        if self.use_operation_id_as_name:
-            if not operation.operationId:
-                raise Error(
-                    'All operations must have an operationId when --use_operation_id_as_name is set. '
-                    f'The following path was missing an operationId: {path_name}'
-                )
-            path_name = operation.operationId
-            method = ''
-        self.parse_all_parameters(
-            self._get_model_name(path_name, method, suffix='ParametersQuery'),
-            operation.parameters,
-            [*path, 'parameters'],
-        )
-        if operation.requestBody:
-            if isinstance(operation.requestBody, ReferenceObject):
-                ref_model = self.get_ref_model(operation.requestBody.ref)
-                request_body = RequestBodyObject.parse_obj(ref_model)
-            else:
-                request_body = operation.requestBody
-            self.parse_request_body(
-                name=self._get_model_name(path_name, method, suffix='Request'),
-                request_body=request_body,
-                path=[*path, 'requestBody'],
-            )
-        self.parse_responses(
-            name=self._get_model_name(path_name, method, suffix='Response'),
-            responses=operation.responses,
-            path=[*path, 'responses'],
-        )
-        if OpenAPIScope.Tags in self.open_api_scopes:
-            self.parse_tags(
-                name=self._get_model_name(path_name, method, suffix='Tags'),
-                tags=operation.tags,
-                path=[*path, 'tags'],
-            )
-
-    def parse_raw(self) -> None:
-        for source, path_parts in self._get_context_source_path_parts():
-            if self.validation:
-                warn(
-                    'Deprecated: the `--validation` option is deprecated. The option will be removed in a future '
-                    'release. Please use another tool to validate OpenAPI.\n'
-                )
-
-                try:
-                    from prance import BaseParser
-
-                    BaseParser(
-                        spec_string=source.text,
-                        backend='openapi-spec-validator',
-                        encoding=self.encoding,
-                    )
-                except ImportError:  # pragma: no cover
-                    warn(
-                        'Warning: Validation was skipped for OpenAPI. `prance` and `openapi-spec-validator` are not '
-                        'installed.\n'
-                        'To use the --validation option with datamodel-code-generator 0.24.0 or later, please run '
-                        "`$ pip install 'datamodel-code-generator[validation]'`.\n"
-                    )
-
-            specification: Dict[str, Any] = load_yaml(source.text)
-            self.raw_obj = specification
-            schemas: Dict[Any, Any] = specification.get('components', {}).get(
-                'schemas', {}
-            )
-            security: Optional[List[Dict[str, List[str]]]] = specification.get(
-                'security'
-            )
-            if OpenAPIScope.Schemas in self.open_api_scopes:
-                for (
-                    obj_name,
-                    raw_obj,
-                ) in schemas.items():  # type: str, Dict[Any, Any]
-                    self.parse_raw_obj(
-                        obj_name,
-                        raw_obj,
-                        [*path_parts, '#/components', 'schemas', obj_name],
-                    )
-            if OpenAPIScope.Paths in self.open_api_scopes:
-                paths: Dict[str, Dict[str, Any]] = specification.get('paths', {})
-                parameters: List[Dict[str, Any]] = [
-                    self._get_ref_body(p['$ref']) if '$ref' in p else p
-                    for p in paths.get('parameters', [])
-                    if isinstance(p, dict)
-                ]
-                paths_path = [*path_parts, '#/paths']
-                for path_name, methods in paths.items():
-                    # Resolve path items if applicable
-                    if '$ref' in methods:
-                        methods = self.get_ref_model(methods['$ref'])
-                    paths_parameters = parameters[:]
-                    if 'parameters' in methods:
-                        paths_parameters.extend(methods['parameters'])
-                    relative_path_name = path_name[1:]
-                    if relative_path_name:
-                        path = [*paths_path, relative_path_name]
-                    else:  # pragma: no cover
-                        path = get_special_path('root', paths_path)
-                    for operation_name, raw_operation in methods.items():
-                        if operation_name not in OPERATION_NAMES:
-                            continue
-                        if paths_parameters:
-                            if 'parameters' in raw_operation:  # pragma: no cover
-                                raw_operation['parameters'].extend(paths_parameters)
-                            else:
-                                raw_operation['parameters'] = paths_parameters
-                        if security is not None and 'security' not in raw_operation:
-                            raw_operation['security'] = security
-                        self.parse_operation(
-                            raw_operation,
-                            [*path, operation_name],
-                        )
-
-        self._resolve_unparsed_json_pointer()
diff -pruN 0.26.4-3/datamodel_code_generator/pydantic_patch.py 0.45.0-1/datamodel_code_generator/pydantic_patch.py
--- 0.26.4-3/datamodel_code_generator/pydantic_patch.py	2024-12-15 17:25:57.706037000 +0000
+++ 0.45.0-1/datamodel_code_generator/pydantic_patch.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,22 +0,0 @@
-import sys
-
-import pydantic.typing
-
-
-def patched_evaluate_forwardref(
-    forward_ref, globalns, localns=None
-):  # pragma: no cover
-    try:
-        # On older Pythons the third positional argument is `recursive_guard`.
-        return forward_ref._evaluate(
-            globalns, localns or None, set()
-        )  # pragma: no cover
-    except TypeError:
-        # Newer Pythons made `recursive_guard` keyword-only, so retry with it
-        # passed explicitly.
-        return forward_ref._evaluate(
-            globalns, localns or None, set(), recursive_guard=set()
-        )
-
-
-# Apply the patch only on Python 3.12 and newer
-if sys.version_info >= (3, 12):
-    pydantic.typing.evaluate_forwardref = patched_evaluate_forwardref
diff -pruN 0.26.4-3/datamodel_code_generator/reference.py 0.45.0-1/datamodel_code_generator/reference.py
--- 0.26.4-3/datamodel_code_generator/reference.py	2024-12-15 17:25:57.706037000 +0000
+++ 0.45.0-1/datamodel_code_generator/reference.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,775 +0,0 @@
-import re
-from collections import defaultdict
-from contextlib import contextmanager
-from enum import Enum, auto
-from functools import lru_cache
-from itertools import zip_longest
-from keyword import iskeyword
-from pathlib import Path, PurePath
-from typing import (
-    TYPE_CHECKING,
-    AbstractSet,
-    Any,
-    Callable,
-    ClassVar,
-    DefaultDict,
-    Dict,
-    Generator,
-    List,
-    Mapping,
-    NamedTuple,
-    Optional,
-    Pattern,
-    Sequence,
-    Set,
-    Tuple,
-    Type,
-    TypeVar,
-    Union,
-)
-from urllib.parse import ParseResult, urlparse
-
-import inflect
-import pydantic
-from packaging import version
-from pydantic import BaseModel
-
-from datamodel_code_generator.util import (
-    PYDANTIC_V2,
-    ConfigDict,
-    cached_property,
-    model_validator,
-)
-
-if TYPE_CHECKING:
-    from pydantic.typing import DictStrAny
-
-
-class _BaseModel(BaseModel):
-    _exclude_fields: ClassVar[Set[str]] = set()
-    _pass_fields: ClassVar[Set[str]] = set()
-
-    if not TYPE_CHECKING:
-
-        def __init__(self, **values: Any) -> None:
-            super().__init__(**values)
-            for pass_field_name in self._pass_fields:
-                if pass_field_name in values:
-                    setattr(self, pass_field_name, values[pass_field_name])
-
-    if not TYPE_CHECKING:
-        if PYDANTIC_V2:
-
-            def dict(
-                self,
-                *,
-                include: Union[
-                    AbstractSet[Union[int, str]], Mapping[Union[int, str], Any], None
-                ] = None,
-                exclude: Union[
-                    AbstractSet[Union[int, str]], Mapping[Union[int, str], Any], None
-                ] = None,
-                by_alias: bool = False,
-                exclude_unset: bool = False,
-                exclude_defaults: bool = False,
-                exclude_none: bool = False,
-            ) -> 'DictStrAny':
-                return self.model_dump(
-                    include=include,
-                    exclude=set(exclude or ()) | self._exclude_fields,
-                    by_alias=by_alias,
-                    exclude_unset=exclude_unset,
-                    exclude_defaults=exclude_defaults,
-                    exclude_none=exclude_none,
-                )
-
-        else:
-
-            def dict(
-                self,
-                *,
-                include: Union[
-                    AbstractSet[Union[int, str]], Mapping[Union[int, str], Any], None
-                ] = None,
-                exclude: Union[
-                    AbstractSet[Union[int, str]], Mapping[Union[int, str], Any], None
-                ] = None,
-                by_alias: bool = False,
-                skip_defaults: Optional[bool] = None,
-                exclude_unset: bool = False,
-                exclude_defaults: bool = False,
-                exclude_none: bool = False,
-            ) -> 'DictStrAny':
-                return super().dict(
-                    include=include,
-                    exclude=set(exclude or ()) | self._exclude_fields,
-                    by_alias=by_alias,
-                    skip_defaults=skip_defaults,
-                    exclude_unset=exclude_unset,
-                    exclude_defaults=exclude_defaults,
-                    exclude_none=exclude_none,
-                )
-
-
-class Reference(_BaseModel):
-    path: str
-    original_name: str = ''
-    name: str
-    duplicate_name: Optional[str] = None
-    loaded: bool = True
-    source: Optional[Any] = None
-    children: List[Any] = []
-    _exclude_fields: ClassVar[Set[str]] = {'children'}
-
-    @model_validator(mode='before')
-    def validate_original_name(cls, values: Any) -> Any:
-        """
-        If original_name is empty then, `original_name` is assigned `name`
-        """
-        if not isinstance(values, dict):  # pragma: no cover
-            return values
-        original_name = values.get('original_name')
-        if original_name:
-            return values
-
-        values['original_name'] = values.get('name', original_name)
-        return values
-
-    if PYDANTIC_V2:
-        # TODO[pydantic]: The following keys were removed: `copy_on_model_validation`.
-        # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-config for more information.
-        model_config = ConfigDict(
-            arbitrary_types_allowed=True,
-            ignored_types=(cached_property,),
-            revalidate_instances='never',
-        )
-    else:
-
-        class Config:
-            arbitrary_types_allowed = True
-            keep_untouched = (cached_property,)
-            copy_on_model_validation = (
-                False
-                if version.parse(pydantic.VERSION) < version.parse('1.9.2')
-                else 'none'
-            )
-
-    @property
-    def short_name(self) -> str:
-        return self.name.rsplit('.', 1)[-1]
-
-
-SINGULAR_NAME_SUFFIX: str = 'Item'
-
-ID_PATTERN: Pattern[str] = re.compile(r'^#[^/].*')
-
-T = TypeVar('T')
-
-
-@contextmanager
-def context_variable(
-    setter: Callable[[T], None], current_value: T, new_value: T
-) -> Generator[None, None, None]:
-    previous_value: T = current_value
-    setter(new_value)
-    try:
-        yield
-    finally:
-        setter(previous_value)
-
-
-_UNDER_SCORE_1: Pattern[str] = re.compile(r'([^_])([A-Z][a-z]+)')
-_UNDER_SCORE_2: Pattern[str] = re.compile('([a-z0-9])([A-Z])')
-
-
-@lru_cache()
-def camel_to_snake(string: str) -> str:
-    subbed = _UNDER_SCORE_1.sub(r'\1_\2', string)
-    return _UNDER_SCORE_2.sub(r'\1_\2', subbed).lower()
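
A quick check of the two-pass regex conversion, assuming the two patterns compiled above:

    assert camel_to_snake('FieldName') == 'field_name'
    assert camel_to_snake('HTTPResponseCode') == 'http_response_code'
    assert camel_to_snake('userID') == 'user_id'
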
-
-
-class FieldNameResolver:
-    def __init__(
-        self,
-        aliases: Optional[Mapping[str, str]] = None,
-        snake_case_field: bool = False,
-        empty_field_name: Optional[str] = None,
-        original_delimiter: Optional[str] = None,
-        special_field_name_prefix: Optional[str] = None,
-        remove_special_field_name_prefix: bool = False,
-        capitalise_enum_members: bool = False,
-        no_alias: bool = False,
-    ):
-        self.aliases: Mapping[str, str] = {} if aliases is None else {**aliases}
-        self.empty_field_name: str = empty_field_name or '_'
-        self.snake_case_field = snake_case_field
-        self.original_delimiter: Optional[str] = original_delimiter
-        self.special_field_name_prefix: Optional[str] = (
-            'field' if special_field_name_prefix is None else special_field_name_prefix
-        )
-        self.remove_special_field_name_prefix: bool = remove_special_field_name_prefix
-        self.capitalise_enum_members: bool = capitalise_enum_members
-        self.no_alias = no_alias
-
-    @classmethod
-    def _validate_field_name(cls, field_name: str) -> bool:
-        return True
-
-    def get_valid_name(
-        self,
-        name: str,
-        excludes: Optional[Set[str]] = None,
-        ignore_snake_case_field: bool = False,
-        upper_camel: bool = False,
-    ) -> str:
-        if not name:
-            name = self.empty_field_name
-        if name[0] == '#':
-            name = name[1:] or self.empty_field_name
-
-        if (
-            self.snake_case_field
-            and not ignore_snake_case_field
-            and self.original_delimiter is not None
-        ):
-            name = snake_to_upper_camel(name, delimiter=self.original_delimiter)
-
-        name = re.sub(r'[¹²³⁴⁵⁶⁷⁸⁹]|\W', '_', name)
-        if name[0].isnumeric():
-            name = f'{self.special_field_name_prefix}_{name}'
-
-        # We should avoid having a field begin with an underscore, as it
-        # causes pydantic to consider it as private
-        while name.startswith('_'):
-            if self.remove_special_field_name_prefix:
-                name = name[1:]
-            else:
-                name = f'{self.special_field_name_prefix}{name}'
-                break
-        if (
-            self.capitalise_enum_members
-            or self.snake_case_field
-            and not ignore_snake_case_field
-        ):
-            name = camel_to_snake(name)
-        count = 1
-        if iskeyword(name) or not self._validate_field_name(name):
-            name += '_'
-        if upper_camel:
-            new_name = snake_to_upper_camel(name)
-        elif self.capitalise_enum_members:
-            new_name = name.upper()
-        else:
-            new_name = name
-        while (
-            not (new_name.isidentifier() or not self._validate_field_name(new_name))
-            or iskeyword(new_name)
-            or (excludes and new_name in excludes)
-        ):
-            new_name = f'{name}{count}' if upper_camel else f'{name}_{count}'
-            count += 1
-        return new_name
-
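
A few illustrative resolutions with a default-configured resolver; note that the underscore-avoidance loop means even the empty-name fallback '_' picks up the 'field' prefix:

    resolver = FieldNameResolver()
    assert resolver.get_valid_name('class') == 'class_'               # Python keyword
    assert resolver.get_valid_name('1st-value') == 'field_1st_value'  # leading digit, '-' replaced
    assert resolver.get_valid_name('') == 'field_'                    # empty fallback '_' gets prefixed
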
-    def get_valid_field_name_and_alias(
-        self, field_name: str, excludes: Optional[Set[str]] = None
-    ) -> Tuple[str, Optional[str]]:
-        if field_name in self.aliases:
-            return self.aliases[field_name], field_name
-        valid_name = self.get_valid_name(field_name, excludes=excludes)
-        return (
-            valid_name,
-            None if self.no_alias or field_name == valid_name else field_name,
-        )
-
-
-class PydanticFieldNameResolver(FieldNameResolver):
-    @classmethod
-    def _validate_field_name(cls, field_name: str) -> bool:
-        # TODO: Support Pydantic V2
-        return not hasattr(BaseModel, field_name)
-
-
-class EnumFieldNameResolver(FieldNameResolver):
-    def get_valid_name(
-        self,
-        name: str,
-        excludes: Optional[Set[str]] = None,
-        ignore_snake_case_field: bool = False,
-        upper_camel: bool = False,
-    ) -> str:
-        return super().get_valid_name(
-            name='mro_' if name == 'mro' else name,
-            excludes={'mro'} | (excludes or set()),
-            ignore_snake_case_field=ignore_snake_case_field,
-            upper_camel=upper_camel,
-        )
-
-
-class ModelType(Enum):
-    PYDANTIC = auto()
-    ENUM = auto()
-    CLASS = auto()
-
-
-DEFAULT_FIELD_NAME_RESOLVERS: Dict[ModelType, Type[FieldNameResolver]] = {
-    ModelType.ENUM: EnumFieldNameResolver,
-    ModelType.PYDANTIC: PydanticFieldNameResolver,
-    ModelType.CLASS: FieldNameResolver,
-}
-
-
-class ClassName(NamedTuple):
-    name: str
-    duplicate_name: Optional[str]
-
-
-def get_relative_path(base_path: PurePath, target_path: PurePath) -> PurePath:
-    if base_path == target_path:
-        return Path('.')
-    if not target_path.is_absolute():
-        return target_path
-    parent_count: int = 0
-    children: List[str] = []
-    for base_part, target_part in zip_longest(base_path.parts, target_path.parts):
-        if base_part == target_part and not parent_count:
-            continue
-        if base_part or not target_part:
-            parent_count += 1
-        if target_part:
-            children.append(target_part)
-    return Path(*['..' for _ in range(parent_count)], *children)
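
A sanity check of the relative-path computation with POSIX-style inputs:

    from pathlib import PurePosixPath

    assert get_relative_path(PurePosixPath('/a/b'), PurePosixPath('/a/c/d')) == Path('../c/d')
    assert get_relative_path(PurePosixPath('/a/b'), PurePosixPath('/a/b')) == Path('.')
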
-
-
-class ModelResolver:
-    def __init__(
-        self,
-        exclude_names: Optional[Set[str]] = None,
-        duplicate_name_suffix: Optional[str] = None,
-        base_url: Optional[str] = None,
-        singular_name_suffix: Optional[str] = None,
-        aliases: Optional[Mapping[str, str]] = None,
-        snake_case_field: bool = False,
-        empty_field_name: Optional[str] = None,
-        custom_class_name_generator: Optional[Callable[[str], str]] = None,
-        base_path: Optional[Path] = None,
-        field_name_resolver_classes: Optional[
-            Dict[ModelType, Type[FieldNameResolver]]
-        ] = None,
-        original_field_name_delimiter: Optional[str] = None,
-        special_field_name_prefix: Optional[str] = None,
-        remove_special_field_name_prefix: bool = False,
-        capitalise_enum_members: bool = False,
-        no_alias: bool = False,
-    ) -> None:
-        self.references: Dict[str, Reference] = {}
-        self._current_root: Sequence[str] = []
-        self._root_id: Optional[str] = None
-        self._root_id_base_path: Optional[str] = None
-        self.ids: DefaultDict[str, Dict[str, str]] = defaultdict(dict)
-        self.after_load_files: Set[str] = set()
-        self.exclude_names: Set[str] = exclude_names or set()
-        self.duplicate_name_suffix: Optional[str] = duplicate_name_suffix
-        self._base_url: Optional[str] = base_url
-        self.singular_name_suffix: str = (
-            singular_name_suffix
-            if isinstance(singular_name_suffix, str)
-            else SINGULAR_NAME_SUFFIX
-        )
-        merged_field_name_resolver_classes = DEFAULT_FIELD_NAME_RESOLVERS.copy()
-        if field_name_resolver_classes:  # pragma: no cover
-            merged_field_name_resolver_classes.update(field_name_resolver_classes)
-        self.field_name_resolvers: Dict[ModelType, FieldNameResolver] = {
-            k: v(
-                aliases=aliases,
-                snake_case_field=snake_case_field,
-                empty_field_name=empty_field_name,
-                original_delimiter=original_field_name_delimiter,
-                special_field_name_prefix=special_field_name_prefix,
-                remove_special_field_name_prefix=remove_special_field_name_prefix,
-                capitalise_enum_members=capitalise_enum_members
-                if k == ModelType.ENUM
-                else False,
-                no_alias=no_alias,
-            )
-            for k, v in merged_field_name_resolver_classes.items()
-        }
-        self.class_name_generator = (
-            custom_class_name_generator or self.default_class_name_generator
-        )
-        self._base_path: Path = base_path or Path.cwd()
-        self._current_base_path: Optional[Path] = self._base_path
-
-    @property
-    def current_base_path(self) -> Optional[Path]:
-        return self._current_base_path
-
-    def set_current_base_path(self, base_path: Optional[Path]) -> None:
-        self._current_base_path = base_path
-
-    @property
-    def base_url(self) -> Optional[str]:
-        return self._base_url
-
-    def set_base_url(self, base_url: Optional[str]) -> None:
-        self._base_url = base_url
-
-    @contextmanager
-    def current_base_path_context(
-        self, base_path: Optional[Path]
-    ) -> Generator[None, None, None]:
-        if base_path:
-            base_path = (self._base_path / base_path).resolve()
-        with context_variable(
-            self.set_current_base_path, self.current_base_path, base_path
-        ):
-            yield
-
-    @contextmanager
-    def base_url_context(self, base_url: str) -> Generator[None, None, None]:
-        if self._base_url:
-            with context_variable(self.set_base_url, self.base_url, base_url):
-                yield
-        else:
-            yield
-
-    @property
-    def current_root(self) -> Sequence[str]:
-        return self._current_root
-
-    def set_current_root(self, current_root: Sequence[str]) -> None:
-        self._current_root = current_root
-
-    @contextmanager
-    def current_root_context(
-        self, current_root: Sequence[str]
-    ) -> Generator[None, None, None]:
-        with context_variable(self.set_current_root, self.current_root, current_root):
-            yield
-
-    @property
-    def root_id(self) -> Optional[str]:
-        return self._root_id
-
-    @property
-    def root_id_base_path(self) -> Optional[str]:
-        return self._root_id_base_path
-
-    def set_root_id(self, root_id: Optional[str]) -> None:
-        if root_id and '/' in root_id:
-            self._root_id_base_path = root_id.rsplit('/', 1)[0]
-        else:
-            self._root_id_base_path = None
-
-        self._root_id = root_id
-
-    def add_id(self, id_: str, path: Sequence[str]) -> None:
-        self.ids['/'.join(self.current_root)][id_] = self.resolve_ref(path)
-
-    def resolve_ref(self, path: Union[Sequence[str], str]) -> str:
-        if isinstance(path, str):
-            joined_path = path
-        else:
-            joined_path = self.join_path(path)
-        if joined_path == '#':
-            return f"{'/'.join(self.current_root)}#"
-        if (
-            self.current_base_path
-            and not self.base_url
-            and joined_path[0] != '#'
-            and not is_url(joined_path)
-        ):
-            # resolve local file path
-            file_path, *object_part = joined_path.split('#', 1)
-            resolved_file_path = Path(self.current_base_path, file_path).resolve()
-            joined_path = get_relative_path(
-                self._base_path, resolved_file_path
-            ).as_posix()
-            if object_part:
-                joined_path += f'#{object_part[0]}'
-        if ID_PATTERN.match(joined_path):
-            ref: str = self.ids['/'.join(self.current_root)][joined_path]
-        else:
-            if '#' not in joined_path:
-                joined_path += '#'
-            elif joined_path[0] == '#':
-                joined_path = f'{"/".join(self.current_root)}{joined_path}'
-
-            delimiter = joined_path.index('#')
-            file_path = joined_path[:delimiter]
-            ref = f'{file_path}#{joined_path[delimiter + 1:]}'
-            if self.root_id_base_path and not (
-                is_url(joined_path) or Path(self._base_path, file_path).is_file()
-            ):
-                ref = f'{self.root_id_base_path}/{ref}'
-
-        if self.base_url:
-            from .http import join_url
-
-            joined_url = join_url(self.base_url, ref)
-            if '#' in joined_url:
-                return joined_url
-            return f'{joined_url}#'
-
-        if is_url(ref):
-            file_part, path_part = ref.split('#', 1)
-            if file_part == self.root_id:
-                return f'{"/".join(self.current_root)}#{path_part}'
-            target_url: ParseResult = urlparse(file_part)
-            if not (self.root_id and self.current_base_path):
-                return ref
-            root_id_url: ParseResult = urlparse(self.root_id)
-            if (target_url.scheme, target_url.netloc) == (
-                root_id_url.scheme,
-                root_id_url.netloc,
-            ):  # pragma: no cover
-                target_url_path = Path(target_url.path)
-                relative_target_base = get_relative_path(
-                    Path(root_id_url.path).parent, target_url_path.parent
-                )
-                target_path = (
-                    self.current_base_path / relative_target_base / target_url_path.name
-                )
-                if target_path.exists():
-                    return f'{target_path.resolve().relative_to(self._base_path)}#{path_part}'
-
-        return ref
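
For a local in-document reference, resolution simply prefixes the current root; an illustrative case with no base URL and no $id in play:

    resolver = ModelResolver()
    with resolver.current_root_context(['api.yaml']):
        assert (
            resolver.resolve_ref('#/components/schemas/Pet')
            == 'api.yaml#/components/schemas/Pet'
        )
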
-
-    def is_after_load(self, ref: str) -> bool:
-        if is_url(ref) or not self.current_base_path:
-            return False
-        file_part, *_ = ref.split('#', 1)
-        absolute_path = Path(self._base_path, file_part).resolve().as_posix()
-        if self.is_external_root_ref(ref) or self.is_external_ref(ref):
-            return absolute_path in self.after_load_files
-        return False  # pragma: no cover
-
-    @staticmethod
-    def is_external_ref(ref: str) -> bool:
-        return '#' in ref and ref[0] != '#'
-
-    @staticmethod
-    def is_external_root_ref(ref: str) -> bool:
-        return ref[-1] == '#'
-
-    @staticmethod
-    def join_path(path: Sequence[str]) -> str:
-        joined_path = '/'.join(p for p in path if p).replace('/#', '#')
-        if '#' not in joined_path:
-            joined_path += '#'
-        return joined_path
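
join_path collapses the '/#' seam and guarantees a fragment marker:

    assert ModelResolver.join_path(['api.yaml', '#/components', 'Pet']) == 'api.yaml#/components/Pet'
    assert ModelResolver.join_path(['api.yaml']) == 'api.yaml#'
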
-
-    def add_ref(self, ref: str, resolved: bool = False) -> Reference:
-        if not resolved:
-            path = self.resolve_ref(ref)
-        else:
-            path = ref
-        reference = self.references.get(path)
-        if reference:
-            return reference
-        split_ref = ref.rsplit('/', 1)
-        if len(split_ref) == 1:
-            original_name = Path(
-                split_ref[0].rstrip('#')
-                if self.is_external_root_ref(path)
-                else split_ref[0]
-            ).stem
-        else:
-            original_name = (
-                Path(split_ref[1].rstrip('#')).stem
-                if self.is_external_root_ref(path)
-                else split_ref[1]
-            )
-        name = self.get_class_name(original_name, unique=False).name
-        reference = Reference(
-            path=path,
-            original_name=original_name,
-            name=name,
-            loaded=False,
-        )
-
-        self.references[path] = reference
-        return reference
-
-    def add(
-        self,
-        path: Sequence[str],
-        original_name: str,
-        *,
-        class_name: bool = False,
-        singular_name: bool = False,
-        unique: bool = True,
-        singular_name_suffix: Optional[str] = None,
-        loaded: bool = False,
-    ) -> Reference:
-        joined_path = self.join_path(path)
-        reference: Optional[Reference] = self.references.get(joined_path)
-        if reference:
-            if loaded and not reference.loaded:
-                reference.loaded = True
-            if (
-                not original_name
-                or original_name == reference.original_name
-                or original_name == reference.name
-            ):
-                return reference
-        name = original_name
-        duplicate_name: Optional[str] = None
-        if class_name:
-            name, duplicate_name = self.get_class_name(
-                name=name,
-                unique=unique,
-                reserved_name=reference.name if reference else None,
-                singular_name=singular_name,
-                singular_name_suffix=singular_name_suffix,
-            )
-        else:
-            # TODO: create a validate for module name
-            name = self.get_valid_field_name(name, model_type=ModelType.CLASS)
-            if singular_name:  # pragma: no cover
-                name = get_singular_name(
-                    name, singular_name_suffix or self.singular_name_suffix
-                )
-            elif unique:  # pragma: no cover
-                unique_name = self._get_unique_name(name)
-                if unique_name == name:
-                    duplicate_name = name
-                name = unique_name
-        if reference:
-            reference.original_name = original_name
-            reference.name = name
-            reference.loaded = loaded
-            reference.duplicate_name = duplicate_name
-        else:
-            reference = Reference(
-                path=joined_path,
-                original_name=original_name,
-                name=name,
-                loaded=loaded,
-                duplicate_name=duplicate_name,
-            )
-            self.references[joined_path] = reference
-        return reference
-
-    def get(self, path: Union[Sequence[str], str]) -> Optional[Reference]:
-        return self.references.get(self.resolve_ref(path))
-
-    def delete(self, path: Union[Sequence[str], str]) -> None:
-        if self.resolve_ref(path) in self.references:
-            del self.references[self.resolve_ref(path)]
-
-    def default_class_name_generator(self, name: str) -> str:
-        # TODO: create a validate for class name
-        return self.field_name_resolvers[ModelType.CLASS].get_valid_name(
-            name, ignore_snake_case_field=True, upper_camel=True
-        )
-
-    def get_class_name(
-        self,
-        name: str,
-        unique: bool = True,
-        reserved_name: Optional[str] = None,
-        singular_name: bool = False,
-        singular_name_suffix: Optional[str] = None,
-    ) -> ClassName:
-        if '.' in name:
-            split_name = name.split('.')
-            prefix = '.'.join(
-                # TODO: create a validate for class name
-                self.field_name_resolvers[ModelType.CLASS].get_valid_name(
-                    n, ignore_snake_case_field=True
-                )
-                for n in split_name[:-1]
-            )
-            prefix += '.'
-            class_name = split_name[-1]
-        else:
-            prefix = ''
-            class_name = name
-
-        class_name = self.class_name_generator(class_name)
-
-        if singular_name:
-            class_name = get_singular_name(
-                class_name, singular_name_suffix or self.singular_name_suffix
-            )
-        duplicate_name: Optional[str] = None
-        if unique:
-            if reserved_name == class_name:
-                return ClassName(name=class_name, duplicate_name=duplicate_name)
-
-            unique_name = self._get_unique_name(class_name, camel=True)
-            if unique_name != class_name:
-                duplicate_name = class_name
-            class_name = unique_name
-        return ClassName(name=f'{prefix}{class_name}', duplicate_name=duplicate_name)
-
-    def _get_unique_name(self, name: str, camel: bool = False) -> str:
-        unique_name: str = name
-        count: int = 1
-        reference_names = {
-            r.name for r in self.references.values()
-        } | self.exclude_names
-        while unique_name in reference_names:
-            if self.duplicate_name_suffix:
-                name_parts: List[Union[str, int]] = [
-                    name,
-                    self.duplicate_name_suffix,
-                    count - 1,
-                ]
-            else:
-                name_parts = [name, count]
-            delimiter = '' if camel else '_'
-            unique_name = delimiter.join(str(p) for p in name_parts if p)
-            count += 1
-        return unique_name
-
-    @classmethod
-    def validate_name(cls, name: str) -> bool:
-        return name.isidentifier() and not iskeyword(name)
-
-    def get_valid_field_name(
-        self,
-        name: str,
-        excludes: Optional[Set[str]] = None,
-        model_type: ModelType = ModelType.PYDANTIC,
-    ) -> str:
-        return self.field_name_resolvers[model_type].get_valid_name(name, excludes)
-
-    def get_valid_field_name_and_alias(
-        self,
-        field_name: str,
-        excludes: Optional[Set[str]] = None,
-        model_type: ModelType = ModelType.PYDANTIC,
-    ) -> Tuple[str, Optional[str]]:
-        return self.field_name_resolvers[model_type].get_valid_field_name_and_alias(
-            field_name, excludes
-        )
-
-
-@lru_cache()
-def get_singular_name(name: str, suffix: str = SINGULAR_NAME_SUFFIX) -> str:
-    singular_name = inflect_engine.singular_noun(name)
-    if singular_name is False:
-        singular_name = f'{name}{suffix}'
-    return singular_name
-
-
-@lru_cache()
-def snake_to_upper_camel(word: str, delimiter: str = '_') -> str:
-    prefix = ''
-    if word.startswith(delimiter):
-        prefix = '_'
-        word = word[1:]
-
-    return prefix + ''.join(x[0].upper() + x[1:] for x in word.split(delimiter) if x)
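
Examples of the two name helpers; the Item suffix is the fallback when inflect cannot singularize (e.g. the word is already singular):

    assert get_singular_name('pets') == 'pet'        # inflect singularizes the plural
    assert get_singular_name('pet') == 'petItem'     # already singular -> suffix appended
    assert snake_to_upper_camel('http_response') == 'HttpResponse'
    assert snake_to_upper_camel('_private_field') == '_PrivateField'
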
-
-
-def is_url(ref: str) -> bool:
-    return ref.startswith(('https://', 'http://'))
-
-
-inflect_engine = inflect.engine()
diff -pruN 0.26.4-3/datamodel_code_generator/types.py 0.45.0-1/datamodel_code_generator/types.py
--- 0.26.4-3/datamodel_code_generator/types.py	2024-12-15 17:25:57.706037000 +0000
+++ 0.45.0-1/datamodel_code_generator/types.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,638 +0,0 @@
-import re
-from abc import ABC, abstractmethod
-from enum import Enum, auto
-from functools import lru_cache
-from itertools import chain
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    Callable,
-    ClassVar,
-    Dict,
-    FrozenSet,
-    Iterable,
-    Iterator,
-    List,
-    Optional,
-    Pattern,
-    Sequence,
-    Set,
-    Tuple,
-    Type,
-    TypeVar,
-    Union,
-)
-
-import pydantic
-from packaging import version
-from pydantic import StrictBool, StrictInt, StrictStr, create_model
-
-from datamodel_code_generator.format import DatetimeClassType, PythonVersion
-from datamodel_code_generator.imports import (
-    IMPORT_ABC_MAPPING,
-    IMPORT_ABC_SEQUENCE,
-    IMPORT_ABC_SET,
-    IMPORT_DICT,
-    IMPORT_FROZEN_SET,
-    IMPORT_LIST,
-    IMPORT_LITERAL,
-    IMPORT_LITERAL_BACKPORT,
-    IMPORT_MAPPING,
-    IMPORT_OPTIONAL,
-    IMPORT_SEQUENCE,
-    IMPORT_SET,
-    IMPORT_UNION,
-    Import,
-)
-from datamodel_code_generator.reference import Reference, _BaseModel
-from datamodel_code_generator.util import (
-    PYDANTIC_V2,
-    ConfigDict,
-    Protocol,
-    runtime_checkable,
-)
-
-if PYDANTIC_V2:
-    from pydantic import GetCoreSchemaHandler
-    from pydantic_core import core_schema
-
-T = TypeVar('T')
-
-OPTIONAL = 'Optional'
-OPTIONAL_PREFIX = f'{OPTIONAL}['
-
-UNION = 'Union'
-UNION_PREFIX = f'{UNION}['
-UNION_DELIMITER = ', '
-UNION_PATTERN: Pattern[str] = re.compile(r'\s*,\s*')
-UNION_OPERATOR_DELIMITER = ' | '
-UNION_OPERATOR_PATTERN: Pattern[str] = re.compile(r'\s*\|\s*')
-NONE = 'None'
-ANY = 'Any'
-LITERAL = 'Literal'
-SEQUENCE = 'Sequence'
-FROZEN_SET = 'FrozenSet'
-MAPPING = 'Mapping'
-DICT = 'Dict'
-SET = 'Set'
-LIST = 'List'
-STANDARD_DICT = 'dict'
-STANDARD_LIST = 'list'
-STANDARD_SET = 'set'
-STR = 'str'
-
-NOT_REQUIRED = 'NotRequired'
-NOT_REQUIRED_PREFIX = f'{NOT_REQUIRED}['
-
-
-class StrictTypes(Enum):
-    str = 'str'
-    bytes = 'bytes'
-    int = 'int'
-    float = 'float'
-    bool = 'bool'
-
-
-class UnionIntFloat:
-    def __init__(self, value: Union[int, float]) -> None:
-        self.value: Union[int, float] = value
-
-    def __int__(self) -> int:
-        return int(self.value)
-
-    def __float__(self) -> float:
-        return float(self.value)
-
-    def __str__(self) -> str:
-        return str(self.value)
-
-    @classmethod
-    def __get_validators__(cls) -> Iterator[Callable[[Any], Any]]:
-        yield cls.validate
-
-    @classmethod
-    def __get_pydantic_core_schema__(
-        cls, _source_type: Any, _handler: 'GetCoreSchemaHandler'
-    ) -> 'core_schema.CoreSchema':
-        from_int_schema = core_schema.chain_schema(
-            [
-                core_schema.union_schema(
-                    [core_schema.int_schema(), core_schema.float_schema()]
-                ),
-                core_schema.no_info_plain_validator_function(cls.validate),
-            ]
-        )
-
-        return core_schema.json_or_python_schema(
-            json_schema=from_int_schema,
-            python_schema=core_schema.union_schema(
-                [
-                    # check if it's an instance first before doing any further work
-                    core_schema.is_instance_schema(UnionIntFloat),
-                    from_int_schema,
-                ]
-            ),
-            serialization=core_schema.plain_serializer_function_ser_schema(
-                lambda instance: instance.value
-            ),
-        )
-
-    @classmethod
-    def validate(cls, v: Any) -> 'UnionIntFloat':
-        if isinstance(v, UnionIntFloat):
-            return v
-        elif not isinstance(v, (int, float)):  # pragma: no cover
-            try:
-                int(v)
-                return cls(v)
-            except (TypeError, ValueError):
-                pass
-            try:
-                float(v)
-                return cls(v)
-            except (TypeError, ValueError):
-                pass
-
-            raise TypeError(f'{v} is not int or float')
-        return cls(v)
-
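
UnionIntFloat keeps the original numeric value while coercing cleanly to either builtin:

    wrapped = UnionIntFloat.validate(3.5)
    assert float(wrapped) == 3.5 and int(wrapped) == 3
    assert UnionIntFloat.validate(wrapped) is wrapped   # already-wrapped values pass through
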
-
-def chain_as_tuple(*iterables: Iterable[T]) -> Tuple[T, ...]:
-    return tuple(chain(*iterables))
-
-
-@lru_cache()
-def _remove_none_from_type(
-    type_: str, split_pattern: Pattern[str], delimiter: str
-) -> List[str]:
-    types: List[str] = []
-    split_type: str = ''
-    inner_count: int = 0
-    for part in re.split(split_pattern, type_):
-        if part == NONE:
-            continue
-        inner_count += part.count('[') - part.count(']')
-        if split_type:
-            split_type += delimiter
-        if inner_count == 0:
-            if split_type:
-                types.append(f'{split_type}{part}')
-            else:
-                types.append(part)
-            split_type = ''
-            continue
-        else:
-            split_type += part
-    return types
-
-
-def _remove_none_from_union(type_: str, use_union_operator: bool) -> str:
-    if use_union_operator:
-        if not re.match(r'^\w+ | ', type_):
-            return type_
-        return UNION_OPERATOR_DELIMITER.join(
-            _remove_none_from_type(
-                type_, UNION_OPERATOR_PATTERN, UNION_OPERATOR_DELIMITER
-            )
-        )
-
-    if not type_.startswith(UNION_PREFIX):
-        return type_
-    inner_types = _remove_none_from_type(
-        type_[len(UNION_PREFIX) :][:-1], UNION_PATTERN, UNION_DELIMITER
-    )
-
-    if len(inner_types) == 1:
-        return inner_types[0]
-    return f'{UNION_PREFIX}{UNION_DELIMITER.join(inner_types)}]'
-
-
-@lru_cache()
-def get_optional_type(type_: str, use_union_operator: bool) -> str:
-    type_ = _remove_none_from_union(type_, use_union_operator)
-
-    if not type_ or type_ == NONE:
-        return NONE
-    if use_union_operator:
-        return f'{type_} | {NONE}'
-    return f'{OPTIONAL_PREFIX}{type_}]'
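
How the helper behaves in both union styles; an existing None member is folded away before wrapping:

    assert get_optional_type('str', False) == 'Optional[str]'
    assert get_optional_type('str', True) == 'str | None'
    assert get_optional_type('Union[str, int, None]', False) == 'Optional[Union[str, int]]'
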
-
-
-@runtime_checkable
-class Modular(Protocol):
-    @property
-    def module_name(self) -> str:
-        raise NotImplementedError
-
-
-@runtime_checkable
-class Nullable(Protocol):
-    @property
-    def nullable(self) -> bool:
-        raise NotImplementedError
-
-
-class DataType(_BaseModel):
-    if PYDANTIC_V2:
-        # TODO[pydantic]: The following keys were removed: `copy_on_model_validation`.
-        # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-config for more information.
-        model_config = ConfigDict(
-            extra='forbid',
-            revalidate_instances='never',
-        )
-    else:
-        if not TYPE_CHECKING:
-
-            @classmethod
-            def model_rebuild(cls) -> None:
-                cls.update_forward_refs()
-
-        class Config:
-            extra = 'forbid'
-            copy_on_model_validation = (
-                False
-                if version.parse(pydantic.VERSION) < version.parse('1.9.2')
-                else 'none'
-            )
-
-    type: Optional[str] = None
-    reference: Optional[Reference] = None
-    data_types: List['DataType'] = []
-    is_func: bool = False
-    kwargs: Optional[Dict[str, Any]] = None
-    import_: Optional[Import] = None
-    python_version: PythonVersion = PythonVersion.PY_38
-    is_optional: bool = False
-    is_dict: bool = False
-    is_list: bool = False
-    is_set: bool = False
-    is_custom_type: bool = False
-    literals: List[Union[StrictBool, StrictInt, StrictStr]] = []
-    use_standard_collections: bool = False
-    use_generic_container: bool = False
-    use_union_operator: bool = False
-    alias: Optional[str] = None
-    parent: Optional[Any] = None
-    children: List[Any] = []
-    strict: bool = False
-    dict_key: Optional['DataType'] = None
-
-    _exclude_fields: ClassVar[Set[str]] = {'parent', 'children'}
-    _pass_fields: ClassVar[Set[str]] = {'parent', 'children', 'data_types', 'reference'}
-
-    @classmethod
-    def from_import(
-        cls: Type['DataTypeT'],
-        import_: Import,
-        *,
-        is_optional: bool = False,
-        is_dict: bool = False,
-        is_list: bool = False,
-        is_set: bool = False,
-        is_custom_type: bool = False,
-        strict: bool = False,
-        kwargs: Optional[Dict[str, Any]] = None,
-    ) -> 'DataTypeT':
-        return cls(
-            type=import_.import_,
-            import_=import_,
-            is_optional=is_optional,
-            is_dict=is_dict,
-            is_list=is_list,
-            is_set=is_set,
-            is_func=bool(kwargs),
-            is_custom_type=is_custom_type,
-            strict=strict,
-            kwargs=kwargs,
-        )
-
-    @property
-    def unresolved_types(self) -> FrozenSet[str]:
-        return frozenset(
-            {
-                t.reference.path
-                for data_types in self.data_types
-                for t in data_types.all_data_types
-                if t.reference
-            }
-            | ({self.reference.path} if self.reference else set())
-        )
-
-    def replace_reference(self, reference: Optional[Reference]) -> None:
-        if not self.reference:  # pragma: no cover
-            raise Exception(
-                f"`{self.__class__.__name__}.replace_reference()` can't be called"
-                f' when `reference` field is empty.'
-            )
-        self_id = id(self)
-        self.reference.children = [
-            c for c in self.reference.children if id(c) != self_id
-        ]
-        self.reference = reference
-        if reference:
-            reference.children.append(self)
-
-    def remove_reference(self) -> None:
-        self.replace_reference(None)
-
-    @property
-    def module_name(self) -> Optional[str]:
-        if self.reference and isinstance(self.reference.source, Modular):
-            return self.reference.source.module_name
-        return None  # pragma: no cover
-
-    @property
-    def full_name(self) -> str:
-        module_name = self.module_name
-        if module_name:
-            return f'{module_name}.{self.reference.short_name}'  # type: ignore
-        return self.reference.short_name  # type: ignore
-
-    @property
-    def all_data_types(self) -> Iterator['DataType']:
-        for data_type in self.data_types:
-            yield from data_type.all_data_types
-        yield self
-
-    @property
-    def all_imports(self) -> Iterator[Import]:
-        for data_type in self.data_types:
-            yield from data_type.all_imports
-        yield from self.imports
-
-    @property
-    def imports(self) -> Iterator[Import]:
-        if self.import_:
-            yield self.import_
-        imports: Tuple[Tuple[bool, Import], ...] = (
-            (self.is_optional and not self.use_union_operator, IMPORT_OPTIONAL),
-            (len(self.data_types) > 1 and not self.use_union_operator, IMPORT_UNION),
-        )
-        if any(self.literals):
-            import_literal = (
-                IMPORT_LITERAL
-                if self.python_version.has_literal_type
-                else IMPORT_LITERAL_BACKPORT
-            )
-            imports = (
-                *imports,
-                (any(self.literals), import_literal),
-            )
-
-        if self.use_generic_container:
-            if self.use_standard_collections:
-                imports = (
-                    *imports,
-                    (self.is_list, IMPORT_ABC_SEQUENCE),
-                    (self.is_set, IMPORT_ABC_SET),
-                    (self.is_dict, IMPORT_ABC_MAPPING),
-                )
-            else:
-                imports = (
-                    *imports,
-                    (self.is_list, IMPORT_SEQUENCE),
-                    (self.is_set, IMPORT_FROZEN_SET),
-                    (self.is_dict, IMPORT_MAPPING),
-                )
-        elif not self.use_standard_collections:
-            imports = (
-                *imports,
-                (self.is_list, IMPORT_LIST),
-                (self.is_set, IMPORT_SET),
-                (self.is_dict, IMPORT_DICT),
-            )
-        for field, import_ in imports:
-            if field and import_ != self.import_:
-                yield import_
-
-        if self.dict_key:
-            yield from self.dict_key.imports
-
-    def __init__(self, **values: Any) -> None:
-        if not TYPE_CHECKING:
-            super().__init__(**values)
-
-        for type_ in self.data_types:
-            if type_.type == ANY and type_.is_optional:
-                if any(t for t in self.data_types if t.type != ANY):  # pragma: no cover
-                    self.is_optional = True
-                    self.data_types = [
-                        t
-                        for t in self.data_types
-                        if not (t.type == ANY and t.is_optional)
-                    ]
-                break  # pragma: no cover
-
-        for data_type in self.data_types:
-            if data_type.reference or data_type.data_types:
-                data_type.parent = self
-
-        if self.reference:
-            self.reference.children.append(self)
-
-    @property
-    def type_hint(self) -> str:
-        type_: Optional[str] = self.alias or self.type
-        if not type_:
-            if self.is_union:
-                data_types: List[str] = []
-                for data_type in self.data_types:
-                    data_type_type = data_type.type_hint
-                    if data_type_type in data_types:  # pragma: no cover
-                        continue
-
-                    if NONE == data_type_type:
-                        self.is_optional = True
-                        continue
-
-                    non_optional_data_type_type = _remove_none_from_union(
-                        data_type_type, self.use_union_operator
-                    )
-
-                    if non_optional_data_type_type != data_type_type:
-                        self.is_optional = True
-
-                    data_types.append(non_optional_data_type_type)
-                if len(data_types) == 1:
-                    type_ = data_types[0]
-                else:
-                    if self.use_union_operator:
-                        type_ = UNION_OPERATOR_DELIMITER.join(data_types)
-                    else:
-                        type_ = f'{UNION_PREFIX}{UNION_DELIMITER.join(data_types)}]'
-            elif len(self.data_types) == 1:
-                type_ = self.data_types[0].type_hint
-            elif self.literals:
-                type_ = f"{LITERAL}[{', '.join(repr(literal) for literal in self.literals)}]"
-            else:
-                if self.reference:
-                    type_ = self.reference.short_name
-                else:
-                    # TODO support strict Any
-                    # type_ = 'Any'
-                    type_ = ''
-        if self.reference:
-            source = self.reference.source
-            if isinstance(source, Nullable) and source.nullable:
-                self.is_optional = True
-        if self.reference and self.python_version == PythonVersion.PY_36:
-            type_ = f"'{type_}'"
-        if self.is_list:
-            if self.use_generic_container:
-                list_ = SEQUENCE
-            elif self.use_standard_collections:
-                list_ = STANDARD_LIST
-            else:
-                list_ = LIST
-            type_ = f'{list_}[{type_}]' if type_ else list_
-        elif self.is_set:
-            if self.use_generic_container:
-                set_ = FROZEN_SET
-            elif self.use_standard_collections:
-                set_ = STANDARD_SET
-            else:
-                set_ = SET
-            type_ = f'{set_}[{type_}]' if type_ else set_
-        elif self.is_dict:
-            if self.use_generic_container:
-                dict_ = MAPPING
-            elif self.use_standard_collections:
-                dict_ = STANDARD_DICT
-            else:
-                dict_ = DICT
-            if self.dict_key or type_:
-                key = self.dict_key.type_hint if self.dict_key else STR
-                type_ = f'{dict_}[{key}, {type_ or ANY}]'
-            else:  # pragma: no cover
-                type_ = dict_
-        if self.is_optional and type_ != ANY:
-            return get_optional_type(type_, self.use_union_operator)
-        elif self.is_func:
-            if self.kwargs:
-                kwargs: str = ', '.join(f'{k}={v}' for k, v in self.kwargs.items())
-                return f'{type_}({kwargs})'
-            return f'{type_}()'
-        return type_
-
-    @property
-    def is_union(self) -> bool:
-        return len(self.data_types) > 1
-
-
-DataType.model_rebuild()
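
A few type-hint renderings with the default settings (standard typing containers, no union operator):

    assert DataType(type='str', is_optional=True).type_hint == 'Optional[str]'
    assert DataType(type='str', is_list=True).type_hint == 'List[str]'
    assert DataType(data_types=[DataType(type='int'), DataType(type='str')]).type_hint == 'Union[int, str]'
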
-
-DataTypeT = TypeVar('DataTypeT', bound=DataType)
-
-
-class EmptyDataType(DataType):
-    pass
-
-
-class Types(Enum):
-    integer = auto()
-    int32 = auto()
-    int64 = auto()
-    number = auto()
-    float = auto()
-    double = auto()
-    decimal = auto()
-    time = auto()
-    string = auto()
-    byte = auto()
-    binary = auto()
-    date = auto()
-    date_time = auto()
-    timedelta = auto()
-    password = auto()
-    path = auto()
-    email = auto()
-    uuid = auto()
-    uuid1 = auto()
-    uuid2 = auto()
-    uuid3 = auto()
-    uuid4 = auto()
-    uuid5 = auto()
-    uri = auto()
-    hostname = auto()
-    ipv4 = auto()
-    ipv4_network = auto()
-    ipv6 = auto()
-    ipv6_network = auto()
-    boolean = auto()
-    object = auto()
-    null = auto()
-    array = auto()
-    any = auto()
-
-
-class DataTypeManager(ABC):
-    def __init__(
-        self,
-        python_version: PythonVersion = PythonVersion.PY_38,
-        use_standard_collections: bool = False,
-        use_generic_container_types: bool = False,
-        strict_types: Optional[Sequence[StrictTypes]] = None,
-        use_non_positive_negative_number_constrained_types: bool = False,
-        use_union_operator: bool = False,
-        use_pendulum: bool = False,
-        target_datetime_class: Optional[DatetimeClassType] = None,
-    ) -> None:
-        self.python_version = python_version
-        self.use_standard_collections: bool = use_standard_collections
-        self.use_generic_container_types: bool = use_generic_container_types
-        self.strict_types: Sequence[StrictTypes] = strict_types or ()
-        self.use_non_positive_negative_number_constrained_types: bool = (
-            use_non_positive_negative_number_constrained_types
-        )
-        self.use_union_operator: bool = use_union_operator
-        self.use_pendulum: bool = use_pendulum
-        self.target_datetime_class: Optional[DatetimeClassType] = target_datetime_class
-
-        if (
-            use_generic_container_types and python_version == PythonVersion.PY_36
-        ):  # pragma: no cover
-            raise Exception(
-                'use_generic_container_types cannot be used with target_python_version 3.6.\n'
-                ' This Python version will not be supported in a future release'
-            )
-
-        if TYPE_CHECKING:
-            self.data_type: Type[DataType]
-        else:
-            self.data_type: Type[DataType] = create_model(
-                'ContextDataType',
-                python_version=(PythonVersion, python_version),
-                use_standard_collections=(bool, use_standard_collections),
-                use_generic_container=(bool, use_generic_container_types),
-                use_union_operator=(bool, use_union_operator),
-                __base__=DataType,
-            )
-
-    @abstractmethod
-    def get_data_type(self, types: Types, **kwargs: Any) -> DataType:
-        raise NotImplementedError
-
-    def get_data_type_from_full_path(
-        self, full_path: str, is_custom_type: bool
-    ) -> DataType:
-        return self.data_type.from_import(
-            Import.from_full_path(full_path), is_custom_type=is_custom_type
-        )
-
-    def get_data_type_from_value(self, value: Any) -> DataType:
-        type_: Optional[Types] = None
-        if isinstance(value, str):
-            type_ = Types.string
-        elif isinstance(value, bool):
-            type_ = Types.boolean
-        elif isinstance(value, int):
-            type_ = Types.integer
-        elif isinstance(value, float):
-            type_ = Types.float
-        elif isinstance(value, dict):
-            return self.data_type.from_import(IMPORT_DICT)
-        elif isinstance(value, list):
-            return self.data_type.from_import(IMPORT_LIST)
-        else:
-            type_ = Types.any
-        return self.get_data_type(type_)
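
get_data_type_from_value dispatches only on the Python type of the value; since get_data_type is abstract, the concrete manager below is a hypothetical stand-in that just exposes the enum member name (and the 'List' result assumes IMPORT_LIST maps to typing.List):

    class SimpleDataTypeManager(DataTypeManager):
        def get_data_type(self, types: Types, **kwargs: Any) -> DataType:
            # Hypothetical mapping: enum member name as the type string.
            return self.data_type(type=types.name)

    manager = SimpleDataTypeManager()
    assert manager.get_data_type_from_value('x').type == 'string'
    assert manager.get_data_type_from_value(True).type == 'boolean'  # bool is tested before int
    assert manager.get_data_type_from_value([1, 2]).type == 'List'   # containers short-circuit
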
diff -pruN 0.26.4-3/datamodel_code_generator/util.py 0.45.0-1/datamodel_code_generator/util.py
--- 0.26.4-3/datamodel_code_generator/util.py	2024-12-15 17:25:57.706037000 +0000
+++ 0.45.0-1/datamodel_code_generator/util.py	1970-01-01 00:00:00.000000000 +0000
@@ -1,106 +0,0 @@
-from __future__ import annotations
-
-import copy
-from functools import cached_property  # noqa: F401
-from pathlib import Path
-from typing import (  # noqa: F401
-    TYPE_CHECKING,
-    Any,
-    Callable,
-    Dict,
-    Protocol,
-    TypeVar,
-    runtime_checkable,
-)
-
-import pydantic
-from packaging import version
-from pydantic import BaseModel as _BaseModel
-
-PYDANTIC_VERSION = version.parse(
-    pydantic.VERSION if isinstance(pydantic.VERSION, str) else str(pydantic.VERSION)
-)
-
-PYDANTIC_V2: bool = PYDANTIC_VERSION >= version.parse('2.0b3')
-
-if TYPE_CHECKING:
-    from typing import Literal
-
-    from yaml import SafeLoader
-
-    def load_toml(path: Path) -> Dict[str, Any]: ...
-
-else:
-    try:
-        from yaml import CSafeLoader as SafeLoader
-    except ImportError:  # pragma: no cover
-        from yaml import SafeLoader
-
-    try:
-        import tomllib
-
-        def load_toml(path: Path) -> Dict[str, Any]:
-            with path.open('rb') as f:
-                return tomllib.load(f)
-
-    except ImportError:
-        import toml
-
-        def load_toml(path: Path) -> Dict[str, Any]:
-            return toml.load(path)
-
-
-SafeLoaderTemp = copy.deepcopy(SafeLoader)
-SafeLoaderTemp.yaml_constructors = copy.deepcopy(SafeLoader.yaml_constructors)
-SafeLoaderTemp.add_constructor(
-    'tag:yaml.org,2002:timestamp',
-    SafeLoaderTemp.yaml_constructors['tag:yaml.org,2002:str'],
-)
-SafeLoader = SafeLoaderTemp
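
The constructor override above makes YAML timestamps load as plain strings, keeping date-like literals intact for code generation. Illustrative:

    import yaml

    # The stock SafeLoader would return datetime.date(2024, 1, 1) here.
    assert yaml.load('since: 2024-01-01', Loader=SafeLoader) == {'since': '2024-01-01'}
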
-
-Model = TypeVar('Model', bound=_BaseModel)
-
-
-def model_validator(
-    mode: Literal['before', 'after'] = 'after',
-) -> Callable[[Callable[[Model, Any], Any]], Callable[[Model, Any], Any]]:
-    def inner(method: Callable[[Model, Any], Any]) -> Callable[[Model, Any], Any]:
-        if PYDANTIC_V2:
-            from pydantic import model_validator as model_validator_v2
-
-            return model_validator_v2(mode=mode)(method)  # type: ignore
-        else:
-            from pydantic import root_validator
-
-            return root_validator(method, pre=mode == 'before')  # type: ignore
-
-    return inner
-
-
-def field_validator(
-    field_name: str,
-    *fields: str,
-    mode: Literal['before', 'after'] = 'after',
-) -> Callable[[Any], Callable[[Model, Any], Any]]:
-    def inner(method: Callable[[Model, Any], Any]) -> Callable[[Model, Any], Any]:
-        if PYDANTIC_V2:
-            from pydantic import field_validator as field_validator_v2
-
-            return field_validator_v2(field_name, *fields, mode=mode)(method)  # type: ignore
-        else:
-            from pydantic import validator
-
-            return validator(field_name, *fields, pre=mode == 'before')(method)  # type: ignore
-
-    return inner
-
-
-if PYDANTIC_V2:
-    from pydantic import ConfigDict as ConfigDict
-else:
-    ConfigDict = dict  # type: ignore
-
-
-class BaseModel(_BaseModel):
-    if PYDANTIC_V2:
-        model_config = ConfigDict(strict=False)
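
The shim let a single class body declare validators that run under either pydantic major version, mirroring how reference.py used it; a minimal sketch (Named and fill_name are hypothetical):

    from typing import Any

    class Named(_BaseModel):  # pydantic's BaseModel, aliased above
        name: str = ''

        @model_validator(mode='before')
        def fill_name(cls, values: Any) -> Any:
            # Runs as a v2 model_validator or a v1 pre root_validator.
            if isinstance(values, dict) and not values.get('name'):
                values['name'] = 'unnamed'
            return values

    assert Named().name == 'unnamed'
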
diff -pruN 0.26.4-3/datamodel_code_generator/version.py 0.45.0-1/datamodel_code_generator/version.py
--- 0.26.4-3/datamodel_code_generator/version.py	2024-12-15 17:26:17.175006400 +0000
+++ 0.45.0-1/datamodel_code_generator/version.py	1970-01-01 00:00:00.000000000 +0000
@@ -1 +0,0 @@
-version: str = '0.26.4'
diff -pruN 0.26.4-3/debian/changelog 0.45.0-1/debian/changelog
--- 0.26.4-3/debian/changelog	2025-01-30 12:29:15.000000000 +0000
+++ 0.45.0-1/debian/changelog	2025-12-19 20:09:00.000000000 +0000
@@ -1,3 +1,45 @@
+python-datamodel-code-generator (0.45.0-1) unstable; urgency=medium
+
+  * New upstream version 0.45.0 (Closes: #1123210)
+  * Drop upstreamed isort patch
+  * Add B-D python3-inline-snapshot
+  * Add B-D python3-time-machine
+  * Add B-D python3-watchfiles
+
+ -- Simon Josefsson <simon@josefsson.org>  Fri, 19 Dec 2025 21:09:00 +0100
+
+python-datamodel-code-generator (0.35.0-1) unstable; urgency=medium
+
+  * Re-enable tests with prance.
+  * Mark B-D help2man as !nodoc.
+  * Patch to support isort v7.
+  * New upstream version 0.35.0
+  * Run wrap-and-sort -satbk.
+
+ -- Simon Josefsson <simon@josefsson.org>  Sat, 18 Oct 2025 10:26:45 +0200
+
+python-datamodel-code-generator (0.34.0-1) unstable; urgency=medium
+
+  * Use watch v5.
+  * New upstream version 0.34.0
+  * Drop Rules-Requires-Root: no.
+  * Use Salsa CI for licenserecon.
+
+ -- Simon Josefsson <simon@josefsson.org>  Tue, 30 Sep 2025 09:41:25 +0200
+
+python-datamodel-code-generator (0.33.0-1) unstable; urgency=medium
+
+  * Fix watch URL.
+  * New upstream version 0.33.0
+  * Drop upstreamed isort-6.patch.
+  * Adapt PYTHONPATH for source path move.
+  * Mark datamodel-codegen Multi-Arch: foreign.
+  * Standards-Version: 4.7.2.
+  * Add build deps.
+  * Disable python-prance tests.
+
+ -- Simon Josefsson <simon@josefsson.org>  Tue, 19 Aug 2025 00:03:03 +0200
+
 python-datamodel-code-generator (0.26.4-3) unstable; urgency=medium
 
   * Team upload.
diff -pruN 0.26.4-3/debian/control 0.45.0-1/debian/control
--- 0.26.4-3/debian/control	2025-01-30 12:29:15.000000000 +0000
+++ 0.45.0-1/debian/control	2025-12-19 20:09:00.000000000 +0000
@@ -3,27 +3,38 @@ Maintainer: Debian Python Team <team+pyt
 Uploaders:
  Simon Josefsson <simon@josefsson.org>,
 Priority: optional
-Standards-Version: 4.7.0
+Standards-Version: 4.7.2
 Section: python
 Homepage: https://github.com/koxudaxi/datamodel-code-generator
 Build-Depends:
  debhelper-compat (= 13),
  dh-sequence-python3,
  black,
- help2man,
+ help2man <!nodoc>,
  pybuild-plugin-pyproject,
  python3-all,
  python3-argcomplete,
+ python3-freezegun <!nocheck>,
+ python3-genson <!nocheck>,
+ python3-graphql-core <!nocheck>,
+ python3-hatchling,
+ python3-httpx <!nocheck>,
+ python3-inline-snapshot <!nocheck>,
  python3-isort,
  python3-jinja2,
  python3-poetry-core,
+ python3-prance <!nocheck>,
  python3-pydantic,
+ python3-pytest <!nocheck>,
+ python3-pytest-benchmark <!nocheck>,
+ python3-pytest-mock <!nocheck>,
  python3-setuptools,
+ python3-time-machine <!nocheck>,
+ python3-watchfiles <!nocheck>,
  python3-yaml,
 Vcs-Git: https://salsa.debian.org/python-team/packages/python-datamodel-code-generator.git
 Vcs-Browser: https://salsa.debian.org/python-team/packages/python-datamodel-code-generator
 Testsuite: autopkgtest-pkg-pybuild
-Rules-Requires-Root: no
 
 Package: python3-datamodel-code-generator
 Architecture: all
@@ -41,6 +52,7 @@ Description: pydantic code generator fro
 Package: datamodel-codegen
 Section: devel
 Architecture: all
+Multi-Arch: foreign
 Depends:
  ${misc:Depends},
  ${python3:Depends},
diff -pruN 0.26.4-3/debian/patches/isort-6.patch 0.45.0-1/debian/patches/isort-6.patch
--- 0.26.4-3/debian/patches/isort-6.patch	2025-01-30 12:29:15.000000000 +0000
+++ 0.45.0-1/debian/patches/isort-6.patch	1970-01-01 00:00:00.000000000 +0000
@@ -1,27 +0,0 @@
-From: Colin Watson <cjwatson@debian.org>
-Date: Thu, 30 Jan 2025 12:21:14 +0000
-Subject: Support isort 6
-
-The breaking changes in isort 6 are just to remove support for Python
-3.8 (see https://github.com/PyCQA/isort/releases), so there's no need
-for this project to treat it differently from isort 5.
-
-Forwarded: https://github.com/koxudaxi/datamodel-code-generator/pull/2289
-Last-Update: 2025-01-30
----
- pyproject.toml | 2 +-
- 1 file changed, 1 insertion(+), 1 deletion(-)
-
-diff --git a/pyproject.toml b/pyproject.toml
-index 921fee8..a2c2b28 100644
---- a/pyproject.toml
-+++ b/pyproject.toml
-@@ -52,7 +52,7 @@ argcomplete = ">=1.10,<4.0"
- jinja2 = ">=2.10.1,<4.0"
- inflect = ">=4.1.0,<6.0"
- black = ">=19.10b0"
--isort = ">=4.3.21,<6.0"
-+isort = ">=4.3.21,<7.0"
- genson = ">=1.2.1,<2.0"
- packaging = "*"
- prance = { version = ">=0.18.2", optional = true }
diff -pruN 0.26.4-3/debian/patches/series 0.45.0-1/debian/patches/series
--- 0.26.4-3/debian/patches/series	2025-01-30 12:29:15.000000000 +0000
+++ 0.45.0-1/debian/patches/series	1970-01-01 00:00:00.000000000 +0000
@@ -1 +0,0 @@
-isort-6.patch
diff -pruN 0.26.4-3/debian/rules 0.45.0-1/debian/rules
--- 0.26.4-3/debian/rules	2025-01-30 12:29:15.000000000 +0000
+++ 0.45.0-1/debian/rules	2025-12-19 20:09:00.000000000 +0000
@@ -2,6 +2,9 @@
 
 include /usr/share/dpkg/pkg-info.mk # DEB_VERSION
 
+# downloads from teamdigitale.github.io, see tests/data/openapi/refs.yaml
+export PYBUILD_TEST_ARGS= -k 'not test_openapi_parser_parse_remote_ref'
+
 %:
 	dh $@ --buildsystem=pybuild
 
@@ -11,7 +14,7 @@ M = $(CURDIR)/debian/tmp/usr/share/man/m
 execute_after_dh_auto_install:
 ifeq (,$(filter nodoc,$(DEB_BUILD_PROFILES)))
 	mkdir -pv $(M)
-	env PYTHONPATH=$(CURDIR) \
+	env PYTHONPATH=$(CURDIR)/src \
 		help2man --no-info --version-string="$(DEB_VERSION)" \
 		--help-option="--no-color --help" \
 		-Idebian/datamodel-codegen.h2m \
diff -pruN 0.26.4-3/debian/salsa-ci.yml 0.45.0-1/debian/salsa-ci.yml
--- 0.26.4-3/debian/salsa-ci.yml	2025-01-30 12:29:15.000000000 +0000
+++ 0.45.0-1/debian/salsa-ci.yml	2025-12-19 20:09:00.000000000 +0000
@@ -1,11 +1,11 @@
 include:
 - https://salsa.debian.org/salsa-ci-team/pipeline/raw/master/recipes/debian.yml
-- https://salsa.debian.org/debian/licenserecon/raw/main/debian/licenserecon.yml
 
 variables:
+  SALSA_CI_AUTOPKGTEST_ALLOWED_EXIT_STATUS: '0'
   SALSA_CI_DISABLE_APTLY: 0
+  SALSA_CI_ENABLE_LICENSERECON: 1
   SALSA_CI_ENABLE_WRAP_AND_SORT: '1'
-  SALSA_CI_WRAP_AND_SORT_ARGS: '-asbkt'
-  SALSA_CI_AUTOPKGTEST_ALLOWED_EXIT_STATUS: '0'
   SALSA_CI_LINTIAN_FAIL_WARNING: '1'
   SALSA_CI_LINTIAN_SUPPRESS_TAGS: 'orig-tarball-missing-upstream-signature'
+  SALSA_CI_WRAP_AND_SORT_ARGS: '-asbkt'
diff -pruN 0.26.4-3/debian/watch 0.45.0-1/debian/watch
--- 0.26.4-3/debian/watch	2025-01-30 12:29:15.000000000 +0000
+++ 0.45.0-1/debian/watch	2025-12-19 20:09:00.000000000 +0000
@@ -1,3 +1,4 @@
-version=4
-opts=uversionmangle=s/(rc|a|b|c)/~$1/ \
-https://pypi.debian.net/datamodel-code-generator/datamodel-code-generator-(.+)\.(?:zip|tgz|tbz|txz|(?:tar\.(?:gz|bz2|xz)))
+Version: 5
+Source: https://pypi.debian.net/datamodel-code-generator/
+Matching-Pattern: datamodel_code_generator-@ANY_VERSION@@ARCHIVE_EXT@
+Uversionmangle: s/(rc|a|b|c)/~$1/
diff -pruN 0.26.4-3/pyproject.toml 0.45.0-1/pyproject.toml
--- 0.26.4-3/pyproject.toml	2024-12-15 17:26:17.175006400 +0000
+++ 0.45.0-1/pyproject.toml	2025-12-19 19:37:31.000000000 +0000
@@ -1,164 +1,261 @@
-[tool.poetry]
+[build-system]
+build-backend = "hatchling.build"
+requires = [
+  "hatch-vcs>=0.4",
+  "hatchling>=1.25",
+]
+
+[project]
 name = "datamodel-code-generator"
-version = "0.26.4"
 description = "Datamodel Code Generator"
-authors = ["Koudai Aono <koxudaxi@gmail.com>"]
-readme = "README.md"
+readme.content-type = "text/markdown"
+readme.file = "README.md"
 license = "MIT"
-homepage = "https://github.com/koxudaxi/datamodel-code-generator"
-repository = "https://github.com/koxudaxi/datamodel-code-generator"
-
-
+authors = [ { name = "Koudai Aono", email = "koxudaxi@gmail.com" } ]
+requires-python = ">=3.9"
 classifiers = [
-        "Development Status :: 4 - Beta",
-        "Natural Language :: English",
-        "License :: OSI Approved :: MIT License",
-        "Programming Language :: Python :: 3",
-        "Programming Language :: Python :: 3.8",
-        "Programming Language :: Python :: 3.9",
-        "Programming Language :: Python :: 3.10",
-        "Programming Language :: Python :: 3.11",
-        "Programming Language :: Python :: 3.12",
-        "Programming Language :: Python :: 3.13",
-        "Programming Language :: Python :: Implementation :: CPython"]
+  "Development Status :: 4 - Beta",
+  "License :: OSI Approved :: MIT License",
+  "Natural Language :: English",
+  "Programming Language :: Python :: 3 :: Only",
+  "Programming Language :: Python :: 3.9",
+  "Programming Language :: Python :: 3.10",
+  "Programming Language :: Python :: 3.11",
+  "Programming Language :: Python :: 3.12",
+  "Programming Language :: Python :: 3.13",
+  "Programming Language :: Python :: 3.14",
+  "Programming Language :: Python :: Implementation :: CPython",
+]
+dynamic = [
+  "version",
+]
+dependencies = [
+  "argcomplete>=2.10.1,<4",
+  "black>=19.10b0",
+  "genson>=1.2.1,<2",
+  "inflect>=4.1,<8",
+  "isort>=4.3.21,<8",
+  "jinja2>=2.10.1,<4",
+  "packaging",
+  "pydantic>=1.5",
+  "pyyaml>=6.0.1",
+  "tomli>=2.2.1,<3; python_version<='3.11'",
+]
+optional-dependencies.all = [
+  "datamodel-code-generator[debug]",
+  "datamodel-code-generator[graphql]",
+  "datamodel-code-generator[http]",
+  "datamodel-code-generator[ruff]",
+  "datamodel-code-generator[validation]",
+  "datamodel-code-generator[watch]",
+]
+optional-dependencies.debug = [
+  "pysnooper>=0.4.1,<2",
+]
+optional-dependencies.graphql = [
+  "graphql-core>=3.2.3",
+]
+optional-dependencies.http = [
+  "httpx>=0.24.1",
+]
+optional-dependencies.ruff = [
+  "ruff>=0.9.10",
+]
+optional-dependencies.validation = [
+  "openapi-spec-validator>=0.2.8,<0.8",
+  "prance>=0.18.2",
+]
+optional-dependencies.watch = [
+  "watchfiles>=1.1",
+]
+urls.Homepage = "https://github.com/koxudaxi/datamodel-code-generator"
+urls.Source = "https://github.com/koxudaxi/datamodel-code-generator"
+scripts.datamodel-codegen = "datamodel_code_generator.__main__:main"
+
+[dependency-groups]
+dev = [
+  { include-group = "coverage" },
+  { include-group = "docs" },
+  { include-group = "fix" },
+  { include-group = "pkg-meta" },
+  { include-group = "test" },
+  { include-group = "type" },
+]
+test = [
+  "freezegun; python_version<'3.10'",
+  "inline-snapshot>=0.31.1",
+  "msgspec>=0.18",
+  "pytest>=6.1",
+  "pytest>=8.3.4",
+  "pytest-benchmark",
+  "pytest-codspeed>=2.2",
+  "pytest-cov>=2.12.1",
+  "pytest-cov>=5",
+  "pytest-mock>=3.14",
+  "pytest-xdist>=3.3.1",
+  "setuptools; python_version<'3.10'",
+  "time-machine>=3.1; python_version>='3.10'",
+  "watchfiles>=1.1",
+  { include-group = "coverage" },
+]
+type = [
+  "pyright>=1.1.393",
+  "types-jinja2",
+  "types-pyyaml",
+  "types-setuptools>=67.6.0.5,<70",
+  "types-toml",
+  { include-group = "test" },
+]
+docs = [
+  "zensical>=0.0.11; python_version>='3.10'",
+]
+black22 = [ "black==22.1" ]
+black23 = [ "black==23.12" ]
+black24 = [ "black==24.1" ]
+isort5 = [ "isort>=5,<6" ]
+isort6 = [ "isort>=6,<7" ]
+isort7 = [ "isort>=7,<8; python_version>='3.10'" ]
+pydantic1 = [ "pydantic<2" ]
+fix = [ "pre-commit-uv>=4.1.4" ]
+pkg-meta = [ "check-wheel-contents>=0.6.1", "twine>=6.1", "uv>=0.5.22" ]
+coverage = [
+  "covdefaults>=2.3",
+  "coverage[toml]>=7.6.1",
+  "diff-cover>=9.7.2",
+]
 
-[build-system]
-requires = ["poetry-core>=1.0.0", "poetry-dynamic-versioning"]
-build-backend = "poetry.core.masonry.api"
+[tool.hatch]
+build.dev-mode-dirs = [ "src" ]
+build.targets.sdist.include = [
+  "/src",
+  "/tests",
+]
+version.source = "vcs"
 
-[tool.poetry-dynamic-versioning]
-enable = false
-vcs = "git"
-# language=RegExp
-pattern = '^(?P<base>\d+\.\d+\.\d+)(-?((?P<stage>[a-zA-Z]+)\.?(?P<revision>\d+)?))?$'
-
-[tool.poetry-dynamic-versioning.substitution]
-files = ["*/version.py"]
-patterns = ["(^version: str = ')[^']*(')"]
-
-
-[tool.poetry.scripts]
-datamodel-codegen = "datamodel_code_generator.__main__:main"
-
-[tool.poetry.dependencies]
-python = "^3.8"
-pydantic =  [
-    {extras = ["email"], version = ">=1.5.1,<3.0,!=2.4.0", python = "<3.10"},
-    {extras = ["email"], version = ">=1.9.0,<3.0,!=2.4.0", python = "~3.10"},
-    {extras = ["email"], version = ">=1.10.0,<3.0,!=2.4.0", python = "^3.11"},
-    {extras = ["email"], version = ">=1.10.0,!=2.0.0,!=2.0.1,<3.0,!=2.4.0", python = "^3.12"}
-]
-argcomplete = ">=1.10,<4.0"
-jinja2 = ">=2.10.1,<4.0"
-inflect = ">=4.1.0,<6.0"
-black = ">=19.10b0"
-isort = ">=4.3.21,<6.0"
-genson = ">=1.2.1,<2.0"
-packaging = "*"
-prance = { version = ">=0.18.2", optional = true }
-openapi-spec-validator = { version = ">=0.2.8,<0.7.0", optional = true }
-toml = { version = ">=0.10.0,<1.0.0", python = "<3.11" }
-PySnooper = { version = ">=0.4.1,<2.0.0", optional = true }
-httpx = { version = "*", optional = true }
-pyyaml = ">=6.0.1"
-graphql-core = {version = "^3.2.3", optional = true}
-
-[tool.poetry.group.dev.dependencies]
-pytest = ">6.1"
-pytest-benchmark = "*"
-pytest-cov = ">=2.12.1"
-pytest-mock = "*"
-mypy = ">=1.4.1,<1.5.0"
-black = ">=23.3,<25.0"
-freezegun = "*"
-types-Jinja2 = "*"
-types-PyYAML = "*"
-types-toml = "*"
-types-setuptools = ">=67.6.0.5,<70.0.0.0"
-pydantic = "*"
-httpx = ">=0.24.1"
-PySnooper = "*"
-ruff = ">=0.0.290,<0.7.5"
-ruff-lsp = ">=0.0.39,<0.0.60"
-pre-commit = "*"
-pytest-xdist = "^3.3.1"
-prance = "*"
-openapi-spec-validator = "*"
-pytest-codspeed = "^2.2.0"
-
-
-[tool.poetry.extras]
-http = ["httpx"]
-graphql = ["graphql-core"]
-debug = ["PySnooper"]
-validation = ["prance", "openapi-spec-validator"]
+[tool.uv]
+conflicts = [
+  [
+    { group = "black24" },
+    { group = "black22" },
+    { group = "black23" },
+    { group = "dev" },
+  ],
+  [
+    { group = "isort5" },
+    { group = "isort6" },
+    { group = "isort7" },
+    { group = "dev" },
+  ],
+  [
+    { group = "pydantic1" },
+    { group = "pkg-meta" },
+    { group = "dev" },
+  ],
+]
 
 [tool.ruff]
-line-length = 88
-extend-select = ['Q', 'RUF100', 'C4', 'UP', 'I']
-flake8-quotes = {inline-quotes = 'single', multiline-quotes = 'double'}
-target-version = 'py37'
-ignore = ['E501', 'UP006', 'UP007', 'Q000', 'Q003' ]
-extend-exclude = ['tests/data']
-
-[tool.ruff.format]
-quote-style = "single"
-indent-style = "space"
-skip-magic-trailing-comma = false
-line-ending = "auto"
-
-[tool.mypy]
-plugins = "pydantic.mypy"
-
-ignore_missing_imports = true
-follow_imports = "silent"
-strict_optional = true
-warn_redundant_casts = true
-warn_unused_ignores = true
-disallow_any_generics = true
-check_untyped_defs = true
-no_implicit_reexport = true
-disallow_untyped_defs = true
-
-[tool.pydantic-mypy]
-init_forbid_extra = true
-init_typed = true
-warn_required_dynamic_aliases = false
-warn_untyped_fields = true
+line-length = 120
+extend-exclude = [ "tests/data" ]
+format.preview = true
+format.docstring-code-format = true
+lint.select = [
+  "ALL",
+]
+lint.ignore = [
+  "ANN401", # Any as type annotation is allowed
+  "C901",   # complex structure
+  "COM812", # Conflict with formatter
+  "CPY",    # No copyright statements
+  "D203",   # one-blank-line-before-class (conflicts with D211)
+  "D212",   # multi-line-summary-first-line (conflicts with D213)
+  "DOC",    # limited documentation
+  "FIX002", # line contains to do
+  "ISC001", # Conflict with formatter
+  "S101",   # can use assert
+  "TD002",  # missing to do author
+  "TD003",  # missing to do link
+  "TD004",  # missing colon in to do
+]
+lint.per-file-ignores."scripts/*.py" = [
+  "INP001",  # no implicit namespace
+  "PLR0912", # too many branches
+  "PLR0914", # too many local variables
+  "PLR0915", # too many statements
+  "S",       # subprocess security is fine for build scripts
+  "T201",    # print is fine for CLI scripts
+]
+lint.per-file-ignores."tests/**/*.py" = [
+  "FBT",     # don't care about booleans as positional arguments in tests
+  "INP001",  # no implicit namespace
+  "PLC0415", # local imports in tests are fine
+  "PLC2701", # private import is fine
+  "PLR0913", # as many arguments as want
+  "PLR0915", # can have longer test methods
+  "PLR0917", # as many arguments as want
+  "PLR2004", # Magic value used in comparison, consider replacing with a constant variable
+  "S",       # no safety concerns
+  "SLF001",  # can test private methods
+]
+lint.isort = { known-first-party = [
+  "datamodel_code_generator",
+  "tests",
+], required-imports = [
+  "from __future__ import annotations",
+] }
 
-[tool.pytest.ini_options]
-filterwarnings = "ignore::DeprecationWarning:distutils"
-norecursedirs = "tests/data/*"
+lint.pydocstyle.convention = "pep257"
+lint.preview = true
 
-[tool.coverage.run]
-source = ["datamodel_code_generator"]
-branch = true
-omit = ["scripts/*"]
-
-
-[tool.coverage.report]
-ignore_errors = true
-exclude_lines = [
-    "if self.debug:",
-    "pragma: no cover",
-    "raise NotImplementedError",
-    "if __name__ == .__main__.:",
-    "if TYPE_CHECKING:",
-    "if not TYPE_CHECKING:"]
+[tool.codespell]
+skip = '.git,*.lock,tests,docs/cli-reference'
 
-omit = ["tests/*"]
+[tool.pytest.ini_options]
+filterwarnings = [
+  "error",
+  "ignore:^.*The `parse_obj` method is deprecated; use `model_validate` instead.*",
+  "ignore:^.*The `__fields_set__` attribute is deprecated, use `model_fields_set` instead.*",
+  "ignore:^.*The `dict` method is deprecated; use `model_dump` instead.*",
+  "ignore:^.*The `copy` method is deprecated; use `model_copy` instead.*",
+  "ignore:^.*`--validation` option is deprecated.*",
+  "ignore:^.*Field name `name` is duplicated on Pet.*",
+  "ignore:^.*format of 'unknown-type' not understood for 'string' - using default.*",
+  "ignore:^.*unclosed file.*",
+  "ignore:^.*black doesn't support `experimental-string-processing` option for wrapping string literal in .*",
+  "ignore:^.*jsonschema.exceptions.RefResolutionError is deprecated as of version 4.18.0. If you wish to catch potential reference resolution errors, directly catch referencing.exceptions.Unresolvable..*",
+  "ignore:^.*`experimental string processing` has been included in `preview` and deprecated. Use `preview` instead..*",
+  "ignore:^.*No schemas found in components/schemas.*",
+  "ignore:^.*Dataclass .* has a field ordering conflict due to inheritance.*:UserWarning",
+]
+norecursedirs = [ "tests/data/*", ".tox" ]
+verbosity_assertions = 2
 
+[tool.coverage]
+html.skip_covered = false
+html.show_contexts = false
+paths.source = [
+  "src",
+  ".tox*/*/lib/python*/site-packages",
+  ".tox*\\*\\Lib\\site-packages",
+  "*/src",
+  "*\\src",
+]
+paths.other = [
+  ".",
+  "*/datamodel-code-generator",
+  "*\\datamodel-code-generator",
+]
+run.dynamic_context = "none"
+run.omit = [ "tests/data/*" ]
+report.fail_under = 88
+run.parallel = true
+run.plugins = [
+  "covdefaults",
+]
+covdefaults.subtract_omit = "*/__main__.py"
+
+[tool.pyright]
+reportPrivateImportUsage = false
 
 [tool.pydantic-pycharm-plugin]
 ignore-init-method-arguments = true
-
-[tool.pydantic-pycharm-plugin.parsable-types]
-# str field may parse int and float
-str = ["int", "float"]
-
-[tool.codespell]
-# Ref: https://github.com/codespell-project/codespell#using-a-config-file
-skip = '.git,*.lock,tests'
-# check-hidden = true
-# ignore-regex = ''
-# ignore-words-list = ''
+parsable-types.str = [ "int", "float" ]
diff -pruN 0.26.4-3/src/datamodel_code_generator/__init__.py 0.45.0-1/src/datamodel_code_generator/__init__.py
--- 0.26.4-3/src/datamodel_code_generator/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,875 @@
+"""Main module for datamodel-code-generator.
+
+Provides the main `generate()` function and related enums/exceptions for generating
+Python data models (Pydantic, dataclasses, TypedDict, msgspec) from various schema formats.
+"""
+
+from __future__ import annotations
+
+import contextlib
+import os
+import sys
+from collections.abc import Iterator, Mapping, Sequence
+from datetime import datetime, timezone
+from enum import Enum
+from pathlib import Path
+from typing import (
+    IO,
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    Final,
+    TextIO,
+    TypeVar,
+    Union,
+    cast,
+)
+from urllib.parse import ParseResult
+
+import yaml
+import yaml.parser
+from typing_extensions import TypeAlias, TypeAliasType, TypedDict
+
+import datamodel_code_generator.pydantic_patch  # noqa: F401
+from datamodel_code_generator.format import (
+    DEFAULT_FORMATTERS,
+    DatetimeClassType,
+    Formatter,
+    PythonVersion,
+    PythonVersionMin,
+)
+from datamodel_code_generator.parser import DefaultPutDict, LiteralType
+from datamodel_code_generator.util import PYDANTIC_V2, SafeLoader
+
+if TYPE_CHECKING:
+    from collections import defaultdict
+
+    from datamodel_code_generator.model.pydantic_v2 import UnionMode
+    from datamodel_code_generator.parser.base import Parser
+    from datamodel_code_generator.types import StrictTypes
+
+    YamlScalar: TypeAlias = Union[str, int, float, bool, None]
+    YamlValue = TypeAliasType("YamlValue", "Union[dict[str, YamlValue], list[YamlValue], YamlScalar]")
+
+MIN_VERSION: Final[int] = 9
+MAX_VERSION: Final[int] = 13
+DEFAULT_SHARED_MODULE_NAME: Final[str] = "shared"
+
+T = TypeVar("T")
+
+
+class DataclassArguments(TypedDict, total=False):
+    """Arguments for @dataclass decorator."""
+
+    init: bool
+    repr: bool
+    eq: bool
+    order: bool
+    unsafe_hash: bool
+    frozen: bool
+    match_args: bool
+    kw_only: bool
+    slots: bool
+    weakref_slot: bool
+
+
+if not TYPE_CHECKING:
+    YamlScalar: TypeAlias = Union[str, int, float, bool, None]
+    if PYDANTIC_V2:
+        YamlValue = TypeAliasType("YamlValue", "Union[dict[str, YamlValue], list[YamlValue], YamlScalar]")
+    else:
+        # Pydantic v1 cannot handle TypeAliasType, use Any for recursive parts
+        YamlValue: TypeAlias = Union[dict[str, Any], list[Any], YamlScalar]
+
+try:
+    import pysnooper
+
+    pysnooper.tracer.DISABLED = True
+except ImportError:  # pragma: no cover
+    pysnooper = None
+
+DEFAULT_BASE_CLASS: str = "pydantic.BaseModel"
+
+
+def load_yaml(stream: str | TextIO) -> YamlValue:
+    """Load YAML content from a string or file-like object."""
+    return yaml.load(stream, Loader=SafeLoader)  # noqa: S506
+
+
+def load_yaml_dict(stream: str | TextIO) -> dict[str, YamlValue]:
+    """Load YAML and return as dict. Raises TypeError if result is not a dict."""
+    result = load_yaml(stream)
+    if not isinstance(result, dict):
+        msg = f"Expected dict, got {type(result).__name__}"
+        raise TypeError(msg)
+    return result
+
+
+def load_yaml_dict_from_path(path: Path, encoding: str) -> dict[str, YamlValue]:
+    """Load YAML and return as dict from a file path."""
+    with path.open(encoding=encoding) as f:
+        return load_yaml_dict(f)
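
The three loaders above layer on each other: load_yaml parses with the project's SafeLoader, load_yaml_dict adds a top-level mapping check, and load_yaml_dict_from_path reads the same thing from disk. A minimal sketch of the expected behaviour (hypothetical inline YAML, assuming the package is importable):

    from datamodel_code_generator import load_yaml, load_yaml_dict

    # Scalars and sequences come back as plain Python objects.
    assert load_yaml("a: 1\nb: [x, y]") == {"a": 1, "b": ["x", "y"]}

    # The dict variant rejects any non-mapping document.
    try:
        load_yaml_dict("- just\n- a\n- list")
    except TypeError as exc:
        print(exc)  # Expected dict, got list
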
+
+
+def get_version() -> str:
+    """Return the installed package version."""
+    package = "datamodel-code-generator"
+
+    from importlib.metadata import version  # noqa: PLC0415
+
+    return version(package)
+
+
+def enable_debug_message() -> None:  # pragma: no cover
+    """Enable debug tracing with pysnooper."""
+    if not pysnooper:
+        msg = "Please run `$pip install 'datamodel-code-generator[debug]'` to use debug option"
+        raise Exception(msg)  # noqa: TRY002
+
+    pysnooper.tracer.DISABLED = False
+
+
+DEFAULT_MAX_VARIABLE_LENGTH: int = 100
+
+
+def snooper_to_methods() -> Callable[..., Any]:
+    """Class decorator to add pysnooper tracing to all methods."""
+
+    def inner(cls: type[T]) -> type[T]:
+        if not pysnooper:
+            return cls
+        import inspect  # noqa: PLC0415
+
+        methods = inspect.getmembers(cls, predicate=inspect.isfunction)
+        for name, method in methods:
+            snooper_method = pysnooper.snoop(max_variable_length=DEFAULT_MAX_VARIABLE_LENGTH)(method)
+            setattr(cls, name, snooper_method)
+        return cls
+
+    return inner
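
snooper_to_methods degrades to a no-op when pysnooper is not importable; otherwise it wraps every function attribute of the class, and tracing stays silent until enable_debug_message() clears pysnooper's module-wide DISABLED flag. A hedged sketch (Resolver is a hypothetical class; the tracing itself requires the [debug] extra):

    from datamodel_code_generator import enable_debug_message, snooper_to_methods

    @snooper_to_methods()
    class Resolver:
        def resolve(self, ref: str) -> str:
            return ref.upper()

    Resolver().resolve("pet")   # no trace output yet: tracer.DISABLED is True
    enable_debug_message()      # raises unless pysnooper is installed
    Resolver().resolve("pet")   # now traced line-by-line to stderr
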
+
+
+@contextlib.contextmanager
+def chdir(path: Path | None) -> Iterator[None]:
+    """Change working directory and return to previous on exit."""
+    if path is None:
+        yield
+    else:
+        prev_cwd = Path.cwd()
+        try:
+            os.chdir(path if path.is_dir() else path.parent)
+            yield
+        finally:
+            os.chdir(prev_cwd)
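
generate() wraps parser.parse() in chdir(output) so that anything resolved relative to the working directory lands near the output: a directory is entered as-is, a file path falls back to its parent, and None is a no-op. A small sketch (out_dir and out_file are hypothetical paths, assumed to exist):

    from pathlib import Path

    from datamodel_code_generator import chdir

    out_dir = Path("build/models")
    out_file = out_dir / "model.py"

    with chdir(out_dir):    # directory: cd into it, restored on exit
        ...
    with chdir(out_file):   # file: cd into its parent, i.e. out_dir
        ...
    with chdir(None):       # None: working directory is untouched
        ...
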
+
+
+def is_openapi(data: dict) -> bool:
+    """Check if the data dict is an OpenAPI specification."""
+    return "openapi" in data
+
+
+JSON_SCHEMA_URLS: tuple[str, ...] = (
+    "http://json-schema.org/",
+    "https://json-schema.org/",
+)
+
+
+def is_schema(data: dict) -> bool:
+    """Check if the data dict is a JSON Schema."""
+    schema = data.get("$schema")
+    if isinstance(schema, str) and any(schema.startswith(u) for u in JSON_SCHEMA_URLS):  # pragma: no cover
+        return True
+    if isinstance(data.get("type"), str):
+        return True
+    if any(
+        isinstance(data.get(o), list)
+        for o in (
+            "allOf",
+            "anyOf",
+            "oneOf",
+        )
+    ):
+        return True
+    return isinstance(data.get("properties"), dict)
+
+
+class InputFileType(Enum):
+    """Supported input file types for schema parsing."""
+
+    Auto = "auto"
+    OpenAPI = "openapi"
+    JsonSchema = "jsonschema"
+    Json = "json"
+    Yaml = "yaml"
+    Dict = "dict"
+    CSV = "csv"
+    GraphQL = "graphql"
+
+
+RAW_DATA_TYPES: list[InputFileType] = [
+    InputFileType.Json,
+    InputFileType.Yaml,
+    InputFileType.Dict,
+    InputFileType.CSV,
+    InputFileType.GraphQL,
+]
+
+
+class DataModelType(Enum):
+    """Supported output data model types."""
+
+    PydanticBaseModel = "pydantic.BaseModel"
+    PydanticV2BaseModel = "pydantic_v2.BaseModel"
+    DataclassesDataclass = "dataclasses.dataclass"
+    TypingTypedDict = "typing.TypedDict"
+    MsgspecStruct = "msgspec.Struct"
+
+
+class ReuseScope(Enum):
+    """Scope for model reuse deduplication.
+
+    module: Deduplicate identical models within each module (default).
+    tree: Deduplicate identical models across all modules, placing shared models in shared.py.
+    """
+
+    Module = "module"
+    Tree = "tree"
+
+
+class OpenAPIScope(Enum):
+    """Scopes for OpenAPI model generation."""
+
+    Schemas = "schemas"
+    Paths = "paths"
+    Tags = "tags"
+    Parameters = "parameters"
+    Webhooks = "webhooks"
+
+
+class AllExportsScope(Enum):
+    """Scope for __all__ exports in __init__.py.
+
+    children: Export models from direct child modules only.
+    recursive: Export models from all descendant modules recursively.
+    """
+
+    Children = "children"
+    Recursive = "recursive"
+
+
+class AllExportsCollisionStrategy(Enum):
+    """Strategy for handling name collisions in recursive exports.
+
+    error: Raise an error when name collision is detected.
+    minimal_prefix: Add module prefix only to colliding names.
+    full_prefix: Add full module path prefix to all colliding names.
+    """
+
+    Error = "error"
+    MinimalPrefix = "minimal-prefix"
+    FullPrefix = "full-prefix"
+
+
+class AllOfMergeMode(Enum):
+    """Mode for field merging in allOf schemas.
+
+    constraints: Merge only constraint fields (minItems, maxItems, pattern, etc.) from parent.
+    all: Merge constraints plus annotation fields (default, examples) from parent.
+    none: Do not merge any fields from parent properties.
+    """
+
+    Constraints = "constraints"
+    All = "all"
+    NoMerge = "none"
+
+
+class GraphQLScope(Enum):
+    """Scopes for GraphQL model generation."""
+
+    Schema = "schema"
+
+
+class ReadOnlyWriteOnlyModelType(Enum):
+    """Model generation strategy for readOnly/writeOnly fields.
+
+    RequestResponse: Generate only Request/Response model variants (no base model).
+    All: Generate Base, Request, and Response models.
+    """
+
+    RequestResponse = "request-response"
+    All = "all"
+
+
+class ModuleSplitMode(Enum):
+    """Mode for splitting generated models into separate files.
+
+    Single: Generate one file per model class.
+    """
+
+    Single = "single"
+
+
+class Error(Exception):
+    """Base exception for datamodel-code-generator errors."""
+
+    def __init__(self, message: str) -> None:
+        """Initialize with message."""
+        self.message: str = message
+
+    def __str__(self) -> str:
+        """Return string representation."""
+        return self.message
+
+
+class InvalidClassNameError(Error):
+    """Raised when a schema title cannot be converted to a valid Python class name."""
+
+    def __init__(self, class_name: str) -> None:
+        """Initialize with class name."""
+        self.class_name = class_name
+        message = f"title={class_name!r} is invalid class name."
+        super().__init__(message=message)
+
+
+def get_first_file(path: Path) -> Path:  # pragma: no cover
+    """Find and return the first file in a path (file or directory)."""
+    if path.is_file():
+        return path
+    if path.is_dir():
+        for child in path.rglob("*"):
+            if child.is_file():
+                return child
+    msg = f"No file found in: {path}"
+    raise FileNotFoundError(msg)
+
+
+def _find_future_import_insertion_point(header: str) -> int:
+    """Find position in header where __future__ import should be inserted."""
+    import ast  # noqa: PLC0415
+
+    try:
+        tree = ast.parse(header)
+    except SyntaxError:
+        return 0
+
+    lines = header.splitlines(keepends=True)
+
+    def line_end_pos(line_num: int) -> int:
+        return sum(len(lines[i]) for i in range(line_num))
+
+    if not tree.body:
+        return len(header)
+
+    first_stmt = tree.body[0]
+    is_docstring = isinstance(first_stmt, ast.Expr) and (
+        (isinstance(first_stmt.value, ast.Constant) and isinstance(first_stmt.value.value, str))
+        or isinstance(first_stmt.value, ast.JoinedStr)
+    )
+    if is_docstring:
+        end_line = first_stmt.end_lineno or len(lines)
+        pos = line_end_pos(end_line)
+        while end_line < len(lines) and not lines[end_line].strip():
+            pos += len(lines[end_line])
+            end_line += 1
+        return pos
+
+    pos = 0
+    for i in range(first_stmt.lineno - 1):
+        pos += len(lines[i])
+    return pos
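
The return value is a character offset into the header: just past the module docstring (plus any trailing blank lines) when one exists, before the first statement otherwise, and 0 when the header does not parse as Python. A worked example of the docstring case, following the implementation above:

    from datamodel_code_generator import _find_future_import_insertion_point

    header = '"""My custom header."""\n\nimport os\n'
    pos = _find_future_import_insertion_point(header)
    # pos points just past the docstring and the blank line after it.
    assert header[:pos] == '"""My custom header."""\n\n'

    # Headers that fail to parse as Python yield 0, so a future import
    # would be spliced in at the very top.
    assert _find_future_import_insertion_point("not ( valid python") == 0
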
+
+
+def generate(  # noqa: PLR0912, PLR0913, PLR0914, PLR0915
+    input_: Path | str | ParseResult | Mapping[str, Any],
+    *,
+    input_filename: str | None = None,
+    input_file_type: InputFileType = InputFileType.Auto,
+    output: Path | None = None,
+    output_model_type: DataModelType = DataModelType.PydanticBaseModel,
+    target_python_version: PythonVersion = PythonVersionMin,
+    base_class: str = "",
+    additional_imports: list[str] | None = None,
+    custom_template_dir: Path | None = None,
+    extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
+    validation: bool = False,
+    field_constraints: bool = False,
+    snake_case_field: bool = False,
+    strip_default_none: bool = False,
+    aliases: Mapping[str, str] | None = None,
+    disable_timestamp: bool = False,
+    enable_version_header: bool = False,
+    enable_command_header: bool = False,
+    command_line: str | None = None,
+    allow_population_by_field_name: bool = False,
+    allow_extra_fields: bool = False,
+    extra_fields: str | None = None,
+    apply_default_values_for_required_fields: bool = False,
+    force_optional_for_required_fields: bool = False,
+    class_name: str | None = None,
+    use_standard_collections: bool = False,
+    use_schema_description: bool = False,
+    use_field_description: bool = False,
+    use_attribute_docstrings: bool = False,
+    use_inline_field_description: bool = False,
+    use_default_kwarg: bool = False,
+    reuse_model: bool = False,
+    reuse_scope: ReuseScope = ReuseScope.Module,
+    shared_module_name: str = DEFAULT_SHARED_MODULE_NAME,
+    encoding: str = "utf-8",
+    enum_field_as_literal: LiteralType | None = None,
+    use_one_literal_as_default: bool = False,
+    use_enum_values_in_discriminator: bool = False,
+    set_default_enum_member: bool = False,
+    use_subclass_enum: bool = False,
+    use_specialized_enum: bool = True,
+    strict_nullable: bool = False,
+    use_generic_container_types: bool = False,
+    enable_faux_immutability: bool = False,
+    disable_appending_item_suffix: bool = False,
+    strict_types: Sequence[StrictTypes] | None = None,
+    empty_enum_field_name: str | None = None,
+    custom_class_name_generator: Callable[[str], str] | None = None,
+    field_extra_keys: set[str] | None = None,
+    field_include_all_keys: bool = False,
+    field_extra_keys_without_x_prefix: set[str] | None = None,
+    openapi_scopes: list[OpenAPIScope] | None = None,
+    include_path_parameters: bool = False,
+    graphql_scopes: list[GraphQLScope] | None = None,  # noqa: ARG001
+    wrap_string_literal: bool | None = None,
+    use_title_as_name: bool = False,
+    use_operation_id_as_name: bool = False,
+    use_unique_items_as_set: bool = False,
+    allof_merge_mode: AllOfMergeMode = AllOfMergeMode.Constraints,
+    http_headers: Sequence[tuple[str, str]] | None = None,
+    http_ignore_tls: bool = False,
+    use_annotated: bool = False,
+    use_serialize_as_any: bool = False,
+    use_non_positive_negative_number_constrained_types: bool = False,
+    use_decimal_for_multiple_of: bool = False,
+    original_field_name_delimiter: str | None = None,
+    use_double_quotes: bool = False,
+    use_union_operator: bool = False,
+    collapse_root_models: bool = False,
+    skip_root_model: bool = False,
+    use_type_alias: bool = False,
+    special_field_name_prefix: str | None = None,
+    remove_special_field_name_prefix: bool = False,
+    capitalise_enum_members: bool = False,
+    keep_model_order: bool = False,
+    custom_file_header: str | None = None,
+    custom_file_header_path: Path | None = None,
+    custom_formatters: list[str] | None = None,
+    custom_formatters_kwargs: dict[str, Any] | None = None,
+    use_pendulum: bool = False,
+    http_query_parameters: Sequence[tuple[str, str]] | None = None,
+    treat_dot_as_module: bool = False,
+    use_exact_imports: bool = False,
+    union_mode: UnionMode | None = None,
+    output_datetime_class: DatetimeClassType | None = None,
+    keyword_only: bool = False,
+    frozen_dataclasses: bool = False,
+    no_alias: bool = False,
+    use_frozen_field: bool = False,
+    formatters: list[Formatter] = DEFAULT_FORMATTERS,
+    settings_path: Path | None = None,
+    parent_scoped_naming: bool = False,
+    dataclass_arguments: DataclassArguments | None = None,
+    disable_future_imports: bool = False,
+    type_mappings: list[str] | None = None,
+    read_only_write_only_model_type: ReadOnlyWriteOnlyModelType | None = None,
+    all_exports_scope: AllExportsScope | None = None,
+    all_exports_collision_strategy: AllExportsCollisionStrategy | None = None,
+    module_split_mode: ModuleSplitMode | None = None,
+) -> None:
+    """Generate Python data models from schema definitions or structured data.
+
+    This is the main entry point for code generation. Supports OpenAPI, JSON Schema,
+    GraphQL, and raw data formats (JSON, YAML, Dict, CSV) as input.
+    """
+    remote_text_cache: DefaultPutDict[str, str] = DefaultPutDict()
+    if isinstance(input_, str):
+        input_text: str | None = input_
+    elif isinstance(input_, ParseResult):
+        from datamodel_code_generator.http import get_body  # noqa: PLC0415
+
+        input_text = remote_text_cache.get_or_put(
+            input_.geturl(),
+            default_factory=lambda url: get_body(url, http_headers, http_ignore_tls, http_query_parameters),
+        )
+    else:
+        input_text = None
+
+    if dataclass_arguments is None:
+        dataclass_arguments = {}
+        if frozen_dataclasses:
+            dataclass_arguments["frozen"] = True
+        if keyword_only:
+            dataclass_arguments["kw_only"] = True
+
+    if isinstance(input_, Path) and not input_.is_absolute():
+        input_ = input_.expanduser().resolve()
+    if input_file_type == InputFileType.Auto:
+        try:
+            input_text_ = (
+                get_first_file(input_).read_text(encoding=encoding) if isinstance(input_, Path) else input_text
+            )
+        except FileNotFoundError as exc:
+            msg = "File not found"
+            raise Error(msg) from exc
+
+        try:
+            assert isinstance(input_text_, str)
+            input_file_type = infer_input_type(input_text_)
+        except Exception as exc:
+            msg = "Invalid file format"
+            raise Error(msg) from exc
+        else:
+            print(  # noqa: T201
+                inferred_message.format(input_file_type.value),
+                file=sys.stderr,
+            )
+
+    kwargs: dict[str, Any] = {}
+    if input_file_type == InputFileType.OpenAPI:  # noqa: PLR1702
+        from datamodel_code_generator.parser.openapi import OpenAPIParser  # noqa: PLC0415
+
+        parser_class: type[Parser] = OpenAPIParser
+        kwargs["openapi_scopes"] = openapi_scopes
+        kwargs["include_path_parameters"] = include_path_parameters
+    elif input_file_type == InputFileType.GraphQL:
+        from datamodel_code_generator.parser.graphql import GraphQLParser  # noqa: PLC0415
+
+        parser_class: type[Parser] = GraphQLParser
+    else:
+        from datamodel_code_generator.parser.jsonschema import JsonSchemaParser  # noqa: PLC0415
+
+        parser_class = JsonSchemaParser
+
+        if input_file_type in RAW_DATA_TYPES:
+            import json  # noqa: PLC0415
+
+            try:
+                if isinstance(input_, Path) and input_.is_dir():  # pragma: no cover
+                    msg = f"Input must be a file for {input_file_type}"
+                    raise Error(msg)  # noqa: TRY301
+                obj: dict[str, Any]
+                if input_file_type == InputFileType.CSV:
+                    import csv  # noqa: PLC0415
+
+                    def get_header_and_first_line(csv_file: IO[str]) -> dict[str, Any]:
+                        csv_reader = csv.DictReader(csv_file)
+                        assert csv_reader.fieldnames is not None
+                        return dict(zip(csv_reader.fieldnames, next(csv_reader)))
+
+                    if isinstance(input_, Path):
+                        with input_.open(encoding=encoding) as f:
+                            obj = get_header_and_first_line(f)
+                    else:
+                        import io  # noqa: PLC0415
+
+                        obj = get_header_and_first_line(io.StringIO(input_text))
+                elif input_file_type == InputFileType.Yaml:
+                    if isinstance(input_, Path):
+                        obj = load_yaml_dict(input_.read_text(encoding=encoding))
+                    else:  # pragma: no cover
+                        assert input_text is not None
+                        obj = load_yaml_dict(input_text)
+                elif input_file_type == InputFileType.Json:
+                    if isinstance(input_, Path):
+                        obj = json.loads(input_.read_text(encoding=encoding))
+                    else:
+                        assert input_text is not None
+                        obj = json.loads(input_text)
+                elif input_file_type == InputFileType.Dict:
+                    import ast  # noqa: PLC0415
+
+                    # Input can be a dict object stored in a python file
+                    obj = (
+                        ast.literal_eval(input_.read_text(encoding=encoding))
+                        if isinstance(input_, Path)
+                        else cast("dict[str, Any]", input_)
+                    )
+                else:  # pragma: no cover
+                    msg = f"Unsupported input file type: {input_file_type}"
+                    raise Error(msg)  # noqa: TRY301
+            except Exception as exc:
+                msg = "Invalid file format"
+                raise Error(msg) from exc
+
+            from genson import SchemaBuilder  # noqa: PLC0415
+
+            builder = SchemaBuilder()
+            builder.add_object(obj)
+            input_text = json.dumps(builder.to_schema())
+
+    if isinstance(input_, ParseResult) and input_file_type not in RAW_DATA_TYPES:
+        input_text = None
+
+    if union_mode is not None:
+        if output_model_type == DataModelType.PydanticV2BaseModel:
+            default_field_extras = {"union_mode": union_mode}
+        else:  # pragma: no cover
+            msg = "union_mode is only supported for pydantic_v2.BaseModel"
+            raise Error(msg)
+    else:
+        default_field_extras = None
+
+    from datamodel_code_generator.model import get_data_model_types  # noqa: PLC0415
+
+    data_model_types = get_data_model_types(output_model_type, target_python_version, use_type_alias=use_type_alias)
+
+    # Add GraphQL-specific model types if needed
+    if input_file_type == InputFileType.GraphQL:
+        kwargs["data_model_scalar_type"] = data_model_types.scalar_model
+        kwargs["data_model_union_type"] = data_model_types.union_model
+
+    source = input_text or input_
+    assert not isinstance(source, Mapping)
+    parser = parser_class(
+        source=source,
+        data_model_type=data_model_types.data_model,
+        data_model_root_type=data_model_types.root_model,
+        data_model_field_type=data_model_types.field_model,
+        data_type_manager_type=data_model_types.data_type_manager,
+        base_class=base_class,
+        additional_imports=additional_imports,
+        custom_template_dir=custom_template_dir,
+        extra_template_data=extra_template_data,
+        target_python_version=target_python_version,
+        dump_resolve_reference_action=data_model_types.dump_resolve_reference_action,
+        validation=validation,
+        field_constraints=field_constraints,
+        snake_case_field=snake_case_field,
+        strip_default_none=strip_default_none,
+        aliases=aliases,
+        allow_population_by_field_name=allow_population_by_field_name,
+        allow_extra_fields=allow_extra_fields,
+        extra_fields=extra_fields,
+        apply_default_values_for_required_fields=apply_default_values_for_required_fields,
+        force_optional_for_required_fields=force_optional_for_required_fields,
+        class_name=class_name,
+        use_standard_collections=use_standard_collections,
+        base_path=input_.parent if isinstance(input_, Path) and input_.is_file() else None,
+        use_schema_description=use_schema_description,
+        use_field_description=use_field_description,
+        use_attribute_docstrings=use_attribute_docstrings,
+        use_inline_field_description=use_inline_field_description,
+        use_default_kwarg=use_default_kwarg,
+        reuse_model=reuse_model,
+        reuse_scope=reuse_scope,
+        shared_module_name=shared_module_name,
+        enum_field_as_literal=LiteralType.All
+        if output_model_type == DataModelType.TypingTypedDict
+        else enum_field_as_literal,
+        use_one_literal_as_default=use_one_literal_as_default,
+        use_enum_values_in_discriminator=use_enum_values_in_discriminator,
+        set_default_enum_member=True
+        if output_model_type == DataModelType.DataclassesDataclass
+        else set_default_enum_member,
+        use_subclass_enum=use_subclass_enum,
+        use_specialized_enum=use_specialized_enum,
+        strict_nullable=strict_nullable,
+        use_generic_container_types=use_generic_container_types,
+        enable_faux_immutability=enable_faux_immutability,
+        remote_text_cache=remote_text_cache,
+        disable_appending_item_suffix=disable_appending_item_suffix,
+        strict_types=strict_types,
+        empty_enum_field_name=empty_enum_field_name,
+        custom_class_name_generator=custom_class_name_generator,
+        field_extra_keys=field_extra_keys,
+        field_include_all_keys=field_include_all_keys,
+        field_extra_keys_without_x_prefix=field_extra_keys_without_x_prefix,
+        wrap_string_literal=wrap_string_literal,
+        use_title_as_name=use_title_as_name,
+        use_operation_id_as_name=use_operation_id_as_name,
+        use_unique_items_as_set=use_unique_items_as_set,
+        allof_merge_mode=allof_merge_mode,
+        http_headers=http_headers,
+        http_ignore_tls=http_ignore_tls,
+        use_annotated=use_annotated,
+        use_serialize_as_any=use_serialize_as_any,
+        use_non_positive_negative_number_constrained_types=use_non_positive_negative_number_constrained_types,
+        use_decimal_for_multiple_of=use_decimal_for_multiple_of,
+        original_field_name_delimiter=original_field_name_delimiter,
+        use_double_quotes=use_double_quotes,
+        use_union_operator=use_union_operator,
+        collapse_root_models=collapse_root_models,
+        skip_root_model=skip_root_model,
+        use_type_alias=use_type_alias,
+        special_field_name_prefix=special_field_name_prefix,
+        remove_special_field_name_prefix=remove_special_field_name_prefix,
+        capitalise_enum_members=capitalise_enum_members,
+        keep_model_order=keep_model_order,
+        known_third_party=data_model_types.known_third_party,
+        custom_formatters=custom_formatters,
+        custom_formatters_kwargs=custom_formatters_kwargs,
+        use_pendulum=use_pendulum,
+        http_query_parameters=http_query_parameters,
+        treat_dot_as_module=treat_dot_as_module,
+        use_exact_imports=use_exact_imports,
+        default_field_extras=default_field_extras,
+        target_datetime_class=output_datetime_class,
+        keyword_only=keyword_only,
+        frozen_dataclasses=frozen_dataclasses,
+        no_alias=no_alias,
+        use_frozen_field=use_frozen_field,
+        formatters=formatters,
+        encoding=encoding,
+        parent_scoped_naming=parent_scoped_naming,
+        dataclass_arguments=dataclass_arguments,
+        type_mappings=type_mappings,
+        read_only_write_only_model_type=read_only_write_only_model_type,
+        **kwargs,
+    )
+
+    with chdir(output):
+        results = parser.parse(
+            settings_path=settings_path,
+            disable_future_imports=disable_future_imports,
+            all_exports_scope=all_exports_scope,
+            all_exports_collision_strategy=all_exports_collision_strategy,
+            module_split_mode=module_split_mode,
+        )
+    if not input_filename:  # pragma: no cover
+        if isinstance(input_, str):
+            input_filename = "<stdin>"
+        elif isinstance(input_, ParseResult):
+            input_filename = input_.geturl()
+        elif input_file_type == InputFileType.Dict:
+            # input_ might be a dict object provided directly, and missing a name field
+            input_filename = getattr(input_, "name", "<dict>")
+        else:
+            assert isinstance(input_, Path)
+            input_filename = input_.name
+    if not results:
+        msg = "Models not found in the input data"
+        raise Error(msg)
+    if isinstance(results, str):
+        # Single-file output: body already contains future imports
+        # Only store future_imports separately if we have a non-empty custom_file_header
+        body = results
+        future_imports = ""
+        modules: dict[Path | None, tuple[str, str, str | None]] = {output: (body, future_imports, input_filename)}
+    else:
+        if output is None:
+            msg = "Modular references require an output directory"
+            raise Error(msg)
+        if output.suffix:
+            msg = "Modular references require an output directory, not a file"
+            raise Error(msg)
+        modules = {
+            output.joinpath(*name): (
+                result.body,
+                result.future_imports,
+                str(result.source.as_posix() if result.source else input_filename),
+            )
+            for name, result in sorted(results.items())
+        }
+
+    timestamp = datetime.now(timezone.utc).replace(microsecond=0).isoformat()
+
+    if custom_file_header is None and custom_file_header_path:
+        custom_file_header = custom_file_header_path.read_text(encoding=encoding)
+
+    header = """\
+# generated by datamodel-codegen:
+#   filename:  {}"""
+    if not disable_timestamp:
+        header += f"\n#   timestamp: {timestamp}"
+    if enable_version_header:
+        header += f"\n#   version:   {get_version()}"
+    if enable_command_header and command_line:
+        safe_command_line = command_line.replace("\n", " ").replace("\r", " ")
+        header += f"\n#   command:   {safe_command_line}"
+
+    file: IO[Any] | None
+    for path, (body, future_imports, filename) in modules.items():
+        if path is None:
+            file = None
+        else:
+            if not path.parent.exists():
+                path.parent.mkdir(parents=True)
+            file = path.open("wt", encoding=encoding)
+
+        safe_filename = filename.replace("\n", " ").replace("\r", " ") if filename else ""
+        effective_header = custom_file_header or header.format(safe_filename)
+
+        if custom_file_header and body:
+            # Extract future imports from body for correct placement after custom_file_header
+            body_without_future = body
+            extracted_future = future_imports  # Use pre-extracted if available
+            lines = body.split("\n")
+            future_indices = [i for i, line in enumerate(lines) if line.strip().startswith("from __future__")]
+            if future_indices:
+                if not extracted_future:
+                    # Extract future imports from body
+                    extracted_future = "\n".join(lines[i] for i in future_indices)
+                remaining_lines = [line for i, line in enumerate(lines) if i not in future_indices]
+                body_without_future = "\n".join(remaining_lines).lstrip("\n")
+
+            if extracted_future:
+                insertion_point = _find_future_import_insertion_point(custom_file_header)
+                header_before = custom_file_header[:insertion_point].rstrip()
+                header_after = custom_file_header[insertion_point:].strip()
+                if header_after:
+                    content = header_before + "\n" + extracted_future + "\n\n" + header_after
+                else:
+                    content = header_before + "\n\n" + extracted_future
+                print(content, file=file)
+                print(file=file)
+                print(body_without_future.rstrip(), file=file)
+            else:
+                print(effective_header, file=file)
+                print(file=file)
+                print(body.rstrip(), file=file)
+        else:
+            # Body already contains future imports, just print as-is
+            print(effective_header, file=file)
+            if body:
+                print(file=file)
+                print(body.rstrip(), file=file)
+
+        if file is not None:
+            file.close()
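
End to end, a single positional input is enough, though pinning input_file_type skips the auto-detection above; every other parameter in the long signature has a default. A minimal usage sketch (the schema, the pet.json name, and the pet.py output path are all hypothetical):

    from pathlib import Path

    from datamodel_code_generator import DataModelType, InputFileType, generate

    schema = '{"title": "Pet", "type": "object", "properties": {"name": {"type": "string"}}}'
    generate(
        schema,
        input_file_type=InputFileType.JsonSchema,
        input_filename="pet.json",   # only used for the generated file header
        output=Path("pet.py"),
        output_model_type=DataModelType.PydanticV2BaseModel,
    )
    # pet.py now contains a `class Pet(BaseModel)` with an optional `name` field.
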
+
+
+def infer_input_type(text: str) -> InputFileType:
+    """Automatically detect the input file type from text content."""
+    try:
+        data = load_yaml(text)
+    except yaml.parser.ParserError:
+        return InputFileType.CSV
+    if isinstance(data, dict):
+        if is_openapi(data):
+            return InputFileType.OpenAPI
+        if is_schema(data):
+            return InputFileType.JsonSchema
+        return InputFileType.Json
+    msg = (
+        "Can't infer input file type from the input data. "
+        "Please specify the input file type explicitly with --input-file-type option."
+    )
+    raise Error(msg)
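
Combined with is_openapi() and is_schema() earlier in the file, the detection is purely structural and checked in order: an "openapi" key wins, then JSON Schema markers (a $schema URL, a string "type", allOf/anyOf/oneOf lists, or a "properties" mapping), and any other mapping is treated as raw JSON data. A sketch of the resulting classification:

    from datamodel_code_generator import InputFileType, infer_input_type

    assert infer_input_type('{"openapi": "3.1.0"}') is InputFileType.OpenAPI
    assert infer_input_type('{"type": "object"}') is InputFileType.JsonSchema
    assert infer_input_type('{"name": "spot"}') is InputFileType.Json
    # YAML parser errors are classified as CSV; any other non-mapping
    # document raises Error asking for an explicit --input-file-type.
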
+
+
+inferred_message = (
+    "The input file type was determined to be: {}\nThis can be specified explicitly with the "
+    "`--input-file-type` option."
+)
+
+__all__ = [
+    "MAX_VERSION",
+    "MIN_VERSION",
+    "AllExportsCollisionStrategy",
+    "AllExportsScope",
+    "DatetimeClassType",
+    "DefaultPutDict",
+    "Error",
+    "InputFileType",
+    "InvalidClassNameError",
+    "LiteralType",
+    "ModuleSplitMode",
+    "PythonVersion",
+    "ReadOnlyWriteOnlyModelType",
+    "generate",
+]
diff -pruN 0.26.4-3/src/datamodel_code_generator/__main__.py 0.45.0-1/src/datamodel_code_generator/__main__.py
--- 0.26.4-3/src/datamodel_code_generator/__main__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/__main__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,1017 @@
+"""Main module for datamodel-code-generator CLI."""
+
+from __future__ import annotations
+
+import difflib
+import json
+import shlex
+import signal
+import sys
+import tempfile
+import warnings
+from collections import defaultdict
+from collections.abc import Sequence  # noqa: TC003  # pydantic needs it
+from enum import IntEnum
+from io import TextIOBase
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, ClassVar, Optional, Union, cast
+from urllib.parse import ParseResult, urlparse
+
+import argcomplete
+from pydantic import BaseModel
+from typing_extensions import TypeAlias
+
+from datamodel_code_generator import (
+    DEFAULT_SHARED_MODULE_NAME,
+    AllExportsCollisionStrategy,
+    AllExportsScope,
+    AllOfMergeMode,
+    DataclassArguments,
+    DataModelType,
+    Error,
+    InputFileType,
+    InvalidClassNameError,
+    ModuleSplitMode,
+    OpenAPIScope,
+    ReadOnlyWriteOnlyModelType,
+    ReuseScope,
+    enable_debug_message,
+    generate,
+)
+from datamodel_code_generator.arguments import DEFAULT_ENCODING, arg_parser, namespace
+from datamodel_code_generator.format import (
+    DEFAULT_FORMATTERS,
+    DatetimeClassType,
+    Formatter,
+    PythonVersion,
+    PythonVersionMin,
+    _get_black,
+    is_supported_in_black,
+)
+from datamodel_code_generator.model.pydantic_v2 import UnionMode  # noqa: TC001 # needed for pydantic
+from datamodel_code_generator.parser import LiteralType  # noqa: TC001 # needed for pydantic
+from datamodel_code_generator.reference import is_url
+from datamodel_code_generator.types import StrictTypes  # noqa: TC001 # needed for pydantic
+from datamodel_code_generator.util import (
+    PYDANTIC_V2,
+    ConfigDict,
+    field_validator,
+    load_toml,
+    model_validator,
+)
+
+if TYPE_CHECKING:
+    from argparse import Namespace
+
+    from typing_extensions import Self
+
+# Options that should be excluded from pyproject.toml config generation
+EXCLUDED_CONFIG_OPTIONS: frozenset[str] = frozenset({
+    "check",
+    "generate_pyproject_config",
+    "generate_cli_command",
+    "ignore_pyproject",
+    "profile",
+    "version",
+    "help",
+    "debug",
+    "no_color",
+    "disable_warnings",
+    "watch",
+    "watch_delay",
+})
+
+BOOLEAN_OPTIONAL_OPTIONS: frozenset[str] = frozenset({
+    "use_specialized_enum",
+})
+
+
+class Exit(IntEnum):
+    """Exit reasons."""
+
+    OK = 0
+    DIFF = 1
+    ERROR = 2
+    KeyboardInterrupt = 3
+
+
+def sig_int_handler(_: int, __: Any) -> None:  # pragma: no cover
+    """Handle SIGINT signal gracefully."""
+    sys.exit(Exit.OK)
+
+
+signal.signal(signal.SIGINT, sig_int_handler)
+
+
+class Config(BaseModel):
+    """Configuration model for code generation."""
+
+    if PYDANTIC_V2:
+        model_config = ConfigDict(arbitrary_types_allowed=True)  # pyright: ignore[reportAssignmentType]
+
+        def get(self, item: str) -> Any:  # pragma: no cover
+            """Get attribute value by name."""
+            return getattr(self, item)
+
+        def __getitem__(self, item: str) -> Any:  # pragma: no cover
+            """Get item by key."""
+            return self.get(item)
+
+        @classmethod
+        def parse_obj(cls, obj: Any) -> Self:
+            """Parse object into Config model."""
+            return cls.model_validate(obj)
+
+        @classmethod
+        def get_fields(cls) -> dict[str, Any]:
+            """Get model fields."""
+            return cls.model_fields
+
+    else:
+
+        class Config:
+            """Pydantic v1 configuration."""
+
+            # Pydantic 1.5.1 doesn't support validate_assignment correctly
+            arbitrary_types_allowed = (TextIOBase,)
+
+        @classmethod
+        def get_fields(cls) -> dict[str, Any]:
+            """Get model fields."""
+            return cls.__fields__
+
+    @field_validator("aliases", "extra_template_data", "custom_formatters_kwargs", mode="before")
+    def validate_file(cls, value: Any) -> TextIOBase | None:  # noqa: N805
+        """Validate and open file path."""
+        if value is None:  # pragma: no cover
+            return value
+
+        path = Path(value)
+        if path.is_file():
+            return cast("TextIOBase", path.expanduser().resolve().open("rt"))
+
+        msg = f"A file was expected but {value} is not a file."  # pragma: no cover
+        raise Error(msg)  # pragma: no cover
+
+    @field_validator(
+        "input",
+        "output",
+        "custom_template_dir",
+        "custom_file_header_path",
+        mode="before",
+    )
+    def validate_path(cls, value: Any) -> Path | None:  # noqa: N805
+        """Validate and resolve path."""
+        if value is None or isinstance(value, Path):
+            return value  # pragma: no cover
+        return Path(value).expanduser().resolve()
+
+    @field_validator("url", mode="before")
+    def validate_url(cls, value: Any) -> ParseResult | None:  # noqa: N805
+        """Validate and parse URL."""
+        if isinstance(value, str) and is_url(value):  # pragma: no cover
+            return urlparse(value)
+        if value is None:  # pragma: no cover
+            return None
+        msg = f"Unsupported URL scheme. Supported: http, https, file. --input={value}"  # pragma: no cover
+        raise Error(msg)  # pragma: no cover
+
+    # Pydantic 1.5.1 doesn't support each_item=True correctly
+    @field_validator("http_headers", mode="before")
+    def validate_http_headers(cls, value: Any) -> list[tuple[str, str]] | None:  # noqa: N805
+        """Validate HTTP headers."""
+        if value is None:  # pragma: no cover
+            return None
+
+        def validate_each_item(each_item: str | tuple[str, str]) -> tuple[str, str]:
+            if isinstance(each_item, str):  # pragma: no cover
+                try:
+                    field_name, field_value = each_item.split(":", maxsplit=1)
+                    return field_name, field_value.lstrip()
+                except ValueError as exc:
+                    msg = f"Invalid http header: {each_item!r}"
+                    raise Error(msg) from exc
+            return each_item  # pragma: no cover
+
+        if isinstance(value, list):
+            return [validate_each_item(each_item) for each_item in value]
+        msg = f"Invalid http_headers value: {value!r}"  # pragma: no cover
+        raise Error(msg)  # pragma: no cover
+
+    @field_validator("http_query_parameters", mode="before")
+    def validate_http_query_parameters(cls, value: Any) -> list[tuple[str, str]] | None:  # noqa: N805
+        """Validate HTTP query parameters."""
+        if value is None:  # pragma: no cover
+            return None
+
+        def validate_each_item(each_item: str | tuple[str, str]) -> tuple[str, str]:
+            if isinstance(each_item, str):  # pragma: no cover
+                try:
+                    field_name, field_value = each_item.split("=", maxsplit=1)
+                    return field_name, field_value.lstrip()
+                except ValueError as exc:
+                    msg = f"Invalid http query parameter: {each_item!r}"
+                    raise Error(msg) from exc
+            return each_item  # pragma: no cover
+
+        if isinstance(value, list):
+            return [validate_each_item(each_item) for each_item in value]
+        msg = f"Invalid http_query_parameters value: {value!r}"  # pragma: no cover
+        raise Error(msg)  # pragma: no cover
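
Both validators accept either pre-split tuples or the raw command-line strings, splitting once on the separator (":" for headers, "=" for query parameters) and left-stripping the value. A sketch of the rule, using a hypothetical helper with the same semantics as validate_http_headers:

    def split_header(raw: str) -> tuple[str, str]:
        # Mirrors the validator: one split on ":", value lstripped.
        name, value = raw.split(":", maxsplit=1)
        return name, value.lstrip()

    assert split_header("Authorization: Bearer abc123") == ("Authorization", "Bearer abc123")
    assert split_header("X-Empty:") == ("X-Empty", "")
    # A string with no ":" raises ValueError, which the validator wraps in Error.
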
+
+    @model_validator(mode="before")
+    def validate_additional_imports(cls, values: dict[str, Any]) -> dict[str, Any]:  # noqa: N805
+        """Validate and split additional imports."""
+        additional_imports = values.get("additional_imports")
+        if additional_imports is not None:
+            values["additional_imports"] = additional_imports.split(",")
+        return values
+
+    @model_validator(mode="before")
+    def validate_custom_formatters(cls, values: dict[str, Any]) -> dict[str, Any]:  # noqa: N805
+        """Validate and split custom formatters."""
+        custom_formatters = values.get("custom_formatters")
+        if custom_formatters is not None:
+            values["custom_formatters"] = custom_formatters.split(",")
+        return values
+
+    __validate_output_datetime_class_err: ClassVar[str] = (
+        '`--output-datetime-class` only allows "datetime" for '
+        f"`--output-model-type` {DataModelType.DataclassesDataclass.value}"
+    )
+
+    __validate_original_field_name_delimiter_err: ClassVar[str] = (
+        "`--original-field-name-delimiter` can not be used without `--snake-case-field`."
+    )
+
+    __validate_custom_file_header_err: ClassVar[str] = (
+        "`--custom_file_header_path` can not be used with `--custom_file_header`."
+    )
+    __validate_keyword_only_err: ClassVar[str] = (
+        f"`--keyword-only` requires `--target-python-version` {PythonVersion.PY_310.value} or higher."
+    )
+
+    __validate_all_exports_collision_strategy_err: ClassVar[str] = (
+        "`--all-exports-collision-strategy` can only be used with `--all-exports-scope=recursive`."
+    )
+
+    if PYDANTIC_V2:
+
+        @model_validator()  # pyright: ignore[reportArgumentType]
+        def validate_output_datetime_class(self: Self) -> Self:  # pyright: ignore[reportRedeclaration]
+            """Validate output datetime class compatibility."""
+            datetime_class_type: DatetimeClassType | None = self.output_datetime_class
+            if (
+                datetime_class_type
+                and datetime_class_type is not DatetimeClassType.Datetime
+                and self.output_model_type == DataModelType.DataclassesDataclass
+            ):
+                raise Error(self.__validate_output_datetime_class_err)
+            return self
+
+        @model_validator()  # pyright: ignore[reportArgumentType]
+        def validate_original_field_name_delimiter(self: Self) -> Self:  # pyright: ignore[reportRedeclaration]
+            """Validate original field name delimiter requires snake case."""
+            if self.original_field_name_delimiter is not None and not self.snake_case_field:
+                raise Error(self.__validate_original_field_name_delimiter_err)
+            return self
+
+        @model_validator()  # pyright: ignore[reportArgumentType]
+        def validate_custom_file_header(self: Self) -> Self:  # pyright: ignore[reportRedeclaration]
+            """Validate custom file header options are mutually exclusive."""
+            if self.custom_file_header and self.custom_file_header_path:
+                raise Error(self.__validate_custom_file_header_err)
+            return self
+
+        @model_validator()  # pyright: ignore[reportArgumentType]
+        def validate_keyword_only(self: Self) -> Self:  # pyright: ignore[reportRedeclaration]
+            """Validate keyword-only compatibility with target Python version."""
+            output_model_type: DataModelType = self.output_model_type
+            python_target: PythonVersion = self.target_python_version
+            if (
+                self.keyword_only
+                and output_model_type == DataModelType.DataclassesDataclass
+                and not python_target.has_kw_only_dataclass
+            ):
+                raise Error(self.__validate_keyword_only_err)
+            return self
+
+        @model_validator()  # pyright: ignore[reportArgumentType]
+        def validate_root(self: Self) -> Self:  # pyright: ignore[reportRedeclaration]
+            """Validate root model configuration."""
+            if self.use_annotated:
+                self.field_constraints = True
+            return self
+
+        @model_validator()  # pyright: ignore[reportArgumentType]
+        def validate_all_exports_collision_strategy(self: Self) -> Self:  # pyright: ignore[reportRedeclaration]
+            """Validate all_exports_collision_strategy requires recursive scope."""
+            if self.all_exports_collision_strategy is not None and self.all_exports_scope != AllExportsScope.Recursive:
+                raise Error(self.__validate_all_exports_collision_strategy_err)
+            return self
+
+    else:
+
+        @model_validator()  # pyright: ignore[reportArgumentType]
+        def validate_output_datetime_class(cls, values: dict[str, Any]) -> dict[str, Any]:  # noqa: N805
+            """Validate output datetime class compatibility."""
+            datetime_class_type: DatetimeClassType | None = values.get("output_datetime_class")
+            if (
+                datetime_class_type
+                and datetime_class_type is not DatetimeClassType.Datetime
+                and values.get("output_model_type") == DataModelType.DataclassesDataclass
+            ):
+                raise Error(cls.__validate_output_datetime_class_err)
+            return values
+
+        @model_validator()  # pyright: ignore[reportArgumentType]
+        def validate_original_field_name_delimiter(cls, values: dict[str, Any]) -> dict[str, Any]:  # noqa: N805
+            """Validate original field name delimiter requires snake case."""
+            if values.get("original_field_name_delimiter") is not None and not values.get("snake_case_field"):
+                raise Error(cls.__validate_original_field_name_delimiter_err)
+            return values
+
+        @model_validator()  # pyright: ignore[reportArgumentType]
+        def validate_custom_file_header(cls, values: dict[str, Any]) -> dict[str, Any]:  # noqa: N805
+            """Validate custom file header options are mutually exclusive."""
+            if values.get("custom_file_header") and values.get("custom_file_header_path"):
+                raise Error(cls.__validate_custom_file_header_err)
+            return values
+
+        @model_validator()  # pyright: ignore[reportArgumentType]
+        def validate_keyword_only(cls, values: dict[str, Any]) -> dict[str, Any]:  # noqa: N805
+            """Validate keyword-only compatibility with target Python version."""
+            output_model_type: DataModelType = cast("DataModelType", values.get("output_model_type"))
+            python_target: PythonVersion = cast("PythonVersion", values.get("target_python_version"))
+            if (
+                values.get("keyword_only")
+                and output_model_type == DataModelType.DataclassesDataclass
+                and not python_target.has_kw_only_dataclass
+            ):
+                raise Error(cls.__validate_keyword_only_err)
+            return values
+
+        @model_validator()  # pyright: ignore[reportArgumentType]
+        def validate_root(cls, values: dict[str, Any]) -> dict[str, Any]:  # noqa: N805
+            """Validate root model configuration."""
+            if values.get("use_annotated"):
+                values["field_constraints"] = True
+            return values
+
+        @model_validator()  # pyright: ignore[reportArgumentType]
+        def validate_all_exports_collision_strategy(cls, values: dict[str, Any]) -> dict[str, Any]:  # noqa: N805
+            """Validate all_exports_collision_strategy requires recursive scope."""
+            if (
+                values.get("all_exports_collision_strategy") is not None
+                and values.get("all_exports_scope") != AllExportsScope.Recursive
+            ):
+                raise Error(cls.__validate_all_exports_collision_strategy_err)
+            return values
+
+    input: Optional[Union[Path, str]] = None  # noqa: UP007, UP045
+    input_file_type: InputFileType = InputFileType.Auto
+    output_model_type: DataModelType = DataModelType.PydanticBaseModel
+    output: Optional[Path] = None  # noqa: UP045
+    check: bool = False
+    debug: bool = False
+    disable_warnings: bool = False
+    target_python_version: PythonVersion = PythonVersionMin
+    base_class: str = ""
+    additional_imports: Optional[list[str]] = None  # noqa: UP045
+    custom_template_dir: Optional[Path] = None  # noqa: UP045
+    extra_template_data: Optional[TextIOBase] = None  # noqa: UP045
+    validation: bool = False
+    field_constraints: bool = False
+    snake_case_field: bool = False
+    strip_default_none: bool = False
+    aliases: Optional[TextIOBase] = None  # noqa: UP045
+    disable_timestamp: bool = False
+    enable_version_header: bool = False
+    enable_command_header: bool = False
+    allow_population_by_field_name: bool = False
+    allow_extra_fields: bool = False
+    extra_fields: Optional[str] = None  # noqa: UP045
+    use_default: bool = False
+    force_optional: bool = False
+    class_name: Optional[str] = None  # noqa: UP045
+    use_standard_collections: bool = False
+    use_schema_description: bool = False
+    use_field_description: bool = False
+    use_attribute_docstrings: bool = False
+    use_inline_field_description: bool = False
+    use_default_kwarg: bool = False
+    reuse_model: bool = False
+    reuse_scope: ReuseScope = ReuseScope.Module
+    shared_module_name: str = DEFAULT_SHARED_MODULE_NAME
+    encoding: str = DEFAULT_ENCODING
+    enum_field_as_literal: Optional[LiteralType] = None  # noqa: UP045
+    use_one_literal_as_default: bool = False
+    use_enum_values_in_discriminator: bool = False
+    set_default_enum_member: bool = False
+    use_subclass_enum: bool = False
+    use_specialized_enum: bool = True
+    strict_nullable: bool = False
+    use_generic_container_types: bool = False
+    use_union_operator: bool = False
+    enable_faux_immutability: bool = False
+    url: Optional[ParseResult] = None  # noqa: UP045
+    disable_appending_item_suffix: bool = False
+    strict_types: list[StrictTypes] = []
+    empty_enum_field_name: Optional[str] = None  # noqa: UP045
+    field_extra_keys: Optional[set[str]] = None  # noqa: UP045
+    field_include_all_keys: bool = False
+    field_extra_keys_without_x_prefix: Optional[set[str]] = None  # noqa: UP045
+    openapi_scopes: Optional[list[OpenAPIScope]] = [OpenAPIScope.Schemas]  # noqa: UP045
+    include_path_parameters: bool = False
+    wrap_string_literal: Optional[bool] = None  # noqa: UP045
+    use_title_as_name: bool = False
+    use_operation_id_as_name: bool = False
+    use_unique_items_as_set: bool = False
+    allof_merge_mode: AllOfMergeMode = AllOfMergeMode.Constraints
+    http_headers: Optional[Sequence[tuple[str, str]]] = None  # noqa: UP045
+    http_ignore_tls: bool = False
+    use_annotated: bool = False
+    use_serialize_as_any: bool = False
+    use_non_positive_negative_number_constrained_types: bool = False
+    use_decimal_for_multiple_of: bool = False
+    original_field_name_delimiter: Optional[str] = None  # noqa: UP045
+    use_double_quotes: bool = False
+    collapse_root_models: bool = False
+    skip_root_model: bool = False
+    use_type_alias: bool = False
+    special_field_name_prefix: Optional[str] = None  # noqa: UP045
+    remove_special_field_name_prefix: bool = False
+    capitalise_enum_members: bool = False
+    keep_model_order: bool = False
+    custom_file_header: Optional[str] = None  # noqa: UP045
+    custom_file_header_path: Optional[Path] = None  # noqa: UP045
+    custom_formatters: Optional[list[str]] = None  # noqa: UP045
+    custom_formatters_kwargs: Optional[TextIOBase] = None  # noqa: UP045
+    use_pendulum: bool = False
+    http_query_parameters: Optional[Sequence[tuple[str, str]]] = None  # noqa: UP045
+    treat_dot_as_module: bool = False
+    use_exact_imports: bool = False
+    union_mode: Optional[UnionMode] = None  # noqa: UP045
+    output_datetime_class: Optional[DatetimeClassType] = None  # noqa: UP045
+    keyword_only: bool = False
+    frozen_dataclasses: bool = False
+    dataclass_arguments: Optional[DataclassArguments] = None  # noqa: UP045
+    no_alias: bool = False
+    use_frozen_field: bool = False
+    formatters: list[Formatter] = DEFAULT_FORMATTERS
+    parent_scoped_naming: bool = False
+    disable_future_imports: bool = False
+    type_mappings: Optional[list[str]] = None  # noqa: UP045
+    read_only_write_only_model_type: Optional[ReadOnlyWriteOnlyModelType] = None  # noqa: UP045
+    all_exports_scope: Optional[AllExportsScope] = None  # noqa: UP045
+    all_exports_collision_strategy: Optional[AllExportsCollisionStrategy] = None  # noqa: UP045
+    module_split_mode: Optional[ModuleSplitMode] = None  # noqa: UP045
+    watch: bool = False
+    watch_delay: float = 0.5
+
+    def merge_args(self, args: Namespace) -> None:
+        """Merge command-line arguments into config."""
+        set_args = {f: getattr(args, f) for f in self.get_fields() if getattr(args, f) is not None}
+
+        if set_args.get("output_model_type") == DataModelType.MsgspecStruct.value:
+            set_args["use_annotated"] = True
+
+        if set_args.get("use_annotated"):
+            set_args["field_constraints"] = True
+
+        parsed_args = Config.parse_obj(set_args)
+        for field_name in set_args:
+            setattr(self, field_name, getattr(parsed_args, field_name))
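+    # Illustrative precedence: if pyproject.toml sets `snake-case-field = true` and
+    # the CLI passes only `--target-python-version 3.12`, merge_args overwrites just
+    # the fields actually given on the command line, so both settings take effect.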
+
+
+def _get_pyproject_toml_config(source: Path, profile: str | None = None) -> dict[str, Any]:
+    """Find and return the [tool.datamodel-codegen] section of the closest pyproject.toml if it exists."""
+    current_path = source
+    while current_path != current_path.parent:
+        if (current_path / "pyproject.toml").is_file():
+            pyproject_toml = load_toml(current_path / "pyproject.toml")
+            if "datamodel-codegen" in pyproject_toml.get("tool", {}):
+                tool_config = pyproject_toml["tool"]["datamodel-codegen"]
+
+                base_config: dict[str, Any] = {k: v for k, v in tool_config.items() if k != "profiles"}
+
+                if profile:
+                    profiles = tool_config.get("profiles", {})
+                    if profile not in profiles:
+                        available = list(profiles.keys()) if profiles else "none"
+                        msg = f"Profile '{profile}' not found in pyproject.toml. Available profiles: {available}"
+                        raise Error(msg)
+                    profile_config = profiles[profile]
+                    base_config.update(profile_config)
+
+                pyproject_config = {k.replace("-", "_"): v for k, v in base_config.items()}
+                # Replace the US-American spelling if present (ignored when the British spelling is also present)
+                if (
+                    "capitalize_enum_members" in pyproject_config and "capitalise_enum_members" not in pyproject_config
+                ):  # pragma: no cover
+                    pyproject_config["capitalise_enum_members"] = pyproject_config.pop("capitalize_enum_members")
+                return pyproject_config
+
+        if (current_path / ".git").exists():
+            # Stop early if we see a git repository root.
+            break
+
+        current_path = current_path.parent
+
+    # If profile was requested but no pyproject.toml config was found, raise an error
+    if profile:
+        msg = f"Profile '{profile}' requested but no [tool.datamodel-codegen] section found in pyproject.toml"
+        raise Error(msg)
+
+    return {}
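+
+# Illustrative input for _get_pyproject_toml_config (TOML keys are kebab-case and
+# are normalized to snake_case here; the profile name "strict" is an example):
+#
+#   [tool.datamodel-codegen]
+#   input-file-type = "openapi"
+#   snake-case-field = true
+#
+#   [tool.datamodel-codegen.profiles.strict]
+#   field-constraints = true
+#
+# With profile="strict" the returned mapping is
+#   {"input_file_type": "openapi", "snake_case_field": True, "field_constraints": True}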
+
+
+TomlValue: TypeAlias = Union[str, bool, list["TomlValue"], tuple["TomlValue", ...]]
+
+
+def _format_toml_value(value: TomlValue) -> str:
+    """Format a Python value as a TOML value string."""
+    if isinstance(value, bool):
+        return "true" if value else "false"
+    if isinstance(value, str):
+        return f'"{value}"'
+    formatted_items = [_format_toml_value(item) for item in value]
+    return f"[{', '.join(formatted_items)}]"
+
+
+def generate_pyproject_config(args: Namespace) -> str:
+    """Generate pyproject.toml [tool.datamodel-codegen] section from CLI arguments."""
+    lines: list[str] = ["[tool.datamodel-codegen]"]
+
+    args_dict: dict[str, object] = vars(args)
+    for key, value in sorted(args_dict.items()):
+        if value is None:
+            continue
+        if key in EXCLUDED_CONFIG_OPTIONS:
+            continue
+
+        toml_key = key.replace("_", "-")
+        toml_value = _format_toml_value(cast("TomlValue", value))
+        lines.append(f"{toml_key} = {toml_value}")
+
+    return "\n".join(lines) + "\n"
+
+
+def _normalize_line_endings(text: str) -> str:
+    """Normalize line endings to LF for cross-platform comparison."""
+    return text.replace("\r\n", "\n")
+
+
+def _compare_single_file(
+    generated_path: Path,
+    actual_path: Path,
+    encoding: str,
+) -> tuple[bool, list[str]]:
+    """Compare generated file content with existing file.
+
+    Returns:
+        Tuple of (has_differences, diff_lines)
+        - has_differences: True if files differ or actual file doesn't exist
+        - diff_lines: List of diff lines for output
+    """
+    generated_content = _normalize_line_endings(generated_path.read_text(encoding=encoding))
+
+    if not actual_path.exists():
+        return True, [f"MISSING: {actual_path} (file does not exist but should be generated)"]
+
+    actual_content = _normalize_line_endings(actual_path.read_text(encoding=encoding))
+
+    if generated_content == actual_content:
+        return False, []
+
+    diff_lines = list(
+        difflib.unified_diff(
+            actual_content.splitlines(keepends=True),
+            generated_content.splitlines(keepends=True),
+            fromfile=str(actual_path),
+            tofile=f"{actual_path} (expected)",
+        )
+    )
+    return True, diff_lines
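+
+# Illustrative: a matching file yields (False, []); a missing file yields
+# (True, ["MISSING: ..."]); differing content yields (True, <unified diff lines>).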
+
+
+def _compare_directories(
+    generated_dir: Path,
+    actual_dir: Path,
+    encoding: str,
+) -> tuple[list[str], list[str], list[str]]:
+    """Compare generated directory with existing directory."""
+    diffs: list[str] = []
+
+    generated_files = {path.relative_to(generated_dir) for path in generated_dir.rglob("*.py")}
+
+    actual_files: set[Path] = set()
+    if actual_dir.exists():
+        for path in actual_dir.rglob("*.py"):
+            if "__pycache__" not in path.parts:
+                actual_files.add(path.relative_to(actual_dir))
+
+    missing_files = [str(rel_path) for rel_path in sorted(generated_files - actual_files)]
+    extra_files = [str(rel_path) for rel_path in sorted(actual_files - generated_files)]
+
+    for rel_path in sorted(generated_files & actual_files):
+        generated_content = _normalize_line_endings((generated_dir / rel_path).read_text(encoding=encoding))
+        actual_content = _normalize_line_endings((actual_dir / rel_path).read_text(encoding=encoding))
+        if generated_content != actual_content:
+            diffs.extend(
+                difflib.unified_diff(
+                    actual_content.splitlines(keepends=True),
+                    generated_content.splitlines(keepends=True),
+                    fromfile=str(rel_path),
+                    tofile=f"{rel_path} (expected)",
+                )
+            )
+
+    return diffs, missing_files, extra_files
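+
+# Illustrative: if the generated tree contains only user.py while the existing
+# output directory contains user.py and old.py, the result is
+# (<diff lines for user.py, if its contents differ>, [], ["old.py"]).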
+
+
+def _format_cli_value(value: str | list[str]) -> str:
+    """Format a value for CLI argument."""
+    if isinstance(value, list):
+        return " ".join(f'"{v}"' if " " in v else v for v in value)
+    return f'"{value}"' if " " in value else value
+
+
+def generate_cli_command(config: dict[str, TomlValue]) -> str:
+    """Generate CLI command from pyproject.toml configuration."""
+    parts: list[str] = ["datamodel-codegen"]
+
+    for key, value in sorted(config.items()):
+        if key in EXCLUDED_CONFIG_OPTIONS:
+            continue
+
+        cli_key = key.replace("_", "-")
+
+        if isinstance(value, bool):
+            if value:
+                parts.append(f"--{cli_key}")
+            elif key in BOOLEAN_OPTIONAL_OPTIONS:
+                parts.append(f"--no-{cli_key}")
+        elif isinstance(value, list):
+            parts.extend((f"--{cli_key}", _format_cli_value(cast("list[str]", value))))
+        else:
+            parts.extend((f"--{cli_key}", _format_cli_value(str(value))))
+
+    return " ".join(parts) + "\n"
+
+
+def run_generate_from_config(  # noqa: PLR0913, PLR0917
+    config: Config,
+    input_: Path | str | ParseResult,
+    output: Path | None,
+    extra_template_data: dict[str, Any] | None,
+    aliases: dict[str, str] | None,
+    command_line: str | None,
+    custom_formatters_kwargs: dict[str, str] | None,
+    settings_path: Path | None = None,
+) -> None:
+    """Run code generation with the given config and parameters."""
+    generate(
+        input_=input_,
+        input_file_type=config.input_file_type,
+        output=output,
+        output_model_type=config.output_model_type,
+        target_python_version=config.target_python_version,
+        base_class=config.base_class,
+        additional_imports=config.additional_imports,
+        custom_template_dir=config.custom_template_dir,
+        validation=config.validation,
+        field_constraints=config.field_constraints,
+        snake_case_field=config.snake_case_field,
+        strip_default_none=config.strip_default_none,
+        extra_template_data=extra_template_data,  # pyright: ignore[reportArgumentType]
+        aliases=aliases,
+        disable_timestamp=config.disable_timestamp,
+        enable_version_header=config.enable_version_header,
+        enable_command_header=config.enable_command_header,
+        command_line=command_line,
+        allow_population_by_field_name=config.allow_population_by_field_name,
+        allow_extra_fields=config.allow_extra_fields,
+        extra_fields=config.extra_fields,
+        apply_default_values_for_required_fields=config.use_default,
+        force_optional_for_required_fields=config.force_optional,
+        class_name=config.class_name,
+        use_standard_collections=config.use_standard_collections,
+        use_schema_description=config.use_schema_description,
+        use_field_description=config.use_field_description,
+        use_attribute_docstrings=config.use_attribute_docstrings,
+        use_inline_field_description=config.use_inline_field_description,
+        use_default_kwarg=config.use_default_kwarg,
+        reuse_model=config.reuse_model,
+        reuse_scope=config.reuse_scope,
+        shared_module_name=config.shared_module_name,
+        encoding=config.encoding,
+        enum_field_as_literal=config.enum_field_as_literal,
+        use_one_literal_as_default=config.use_one_literal_as_default,
+        use_enum_values_in_discriminator=config.use_enum_values_in_discriminator,
+        set_default_enum_member=config.set_default_enum_member,
+        use_subclass_enum=config.use_subclass_enum,
+        use_specialized_enum=config.use_specialized_enum,
+        strict_nullable=config.strict_nullable,
+        use_generic_container_types=config.use_generic_container_types,
+        enable_faux_immutability=config.enable_faux_immutability,
+        disable_appending_item_suffix=config.disable_appending_item_suffix,
+        strict_types=config.strict_types,
+        empty_enum_field_name=config.empty_enum_field_name,
+        field_extra_keys=config.field_extra_keys,
+        field_include_all_keys=config.field_include_all_keys,
+        field_extra_keys_without_x_prefix=config.field_extra_keys_without_x_prefix,
+        openapi_scopes=config.openapi_scopes,
+        include_path_parameters=config.include_path_parameters,
+        wrap_string_literal=config.wrap_string_literal,
+        use_title_as_name=config.use_title_as_name,
+        use_operation_id_as_name=config.use_operation_id_as_name,
+        use_unique_items_as_set=config.use_unique_items_as_set,
+        allof_merge_mode=config.allof_merge_mode,
+        http_headers=config.http_headers,
+        http_ignore_tls=config.http_ignore_tls,
+        use_annotated=config.use_annotated,
+        use_serialize_as_any=config.use_serialize_as_any,
+        use_non_positive_negative_number_constrained_types=config.use_non_positive_negative_number_constrained_types,
+        use_decimal_for_multiple_of=config.use_decimal_for_multiple_of,
+        original_field_name_delimiter=config.original_field_name_delimiter,
+        use_double_quotes=config.use_double_quotes,
+        collapse_root_models=config.collapse_root_models,
+        skip_root_model=config.skip_root_model,
+        use_type_alias=config.use_type_alias,
+        use_union_operator=config.use_union_operator,
+        special_field_name_prefix=config.special_field_name_prefix,
+        remove_special_field_name_prefix=config.remove_special_field_name_prefix,
+        capitalise_enum_members=config.capitalise_enum_members,
+        keep_model_order=config.keep_model_order,
+        custom_file_header=config.custom_file_header,
+        custom_file_header_path=config.custom_file_header_path,
+        custom_formatters=config.custom_formatters,
+        custom_formatters_kwargs=custom_formatters_kwargs,
+        use_pendulum=config.use_pendulum,
+        http_query_parameters=config.http_query_parameters,
+        treat_dot_as_module=config.treat_dot_as_module,
+        use_exact_imports=config.use_exact_imports,
+        union_mode=config.union_mode,
+        output_datetime_class=config.output_datetime_class,
+        keyword_only=config.keyword_only,
+        frozen_dataclasses=config.frozen_dataclasses,
+        no_alias=config.no_alias,
+        use_frozen_field=config.use_frozen_field,
+        formatters=config.formatters,
+        settings_path=settings_path,
+        parent_scoped_naming=config.parent_scoped_naming,
+        dataclass_arguments=config.dataclass_arguments,
+        disable_future_imports=config.disable_future_imports,
+        type_mappings=config.type_mappings,
+        read_only_write_only_model_type=config.read_only_write_only_model_type,
+        all_exports_scope=config.all_exports_scope,
+        all_exports_collision_strategy=config.all_exports_collision_strategy,
+        module_split_mode=config.module_split_mode,
+    )
+
+
+def main(args: Sequence[str] | None = None) -> Exit:  # noqa: PLR0911, PLR0912, PLR0914, PLR0915
+    """Execute datamodel code generation from command-line arguments."""
+    argcomplete.autocomplete(arg_parser)
+
+    if args is None:  # pragma: no cover
+        args = sys.argv[1:]
+
+    arg_parser.parse_args(args, namespace=namespace)
+
+    if namespace.version:
+        from datamodel_code_generator import get_version  # noqa: PLC0415
+
+        print(get_version())  # noqa: T201
+        sys.exit(0)
+
+    if namespace.generate_pyproject_config:
+        config_output = generate_pyproject_config(namespace)
+        print(config_output)  # noqa: T201
+        return Exit.OK
+
+    # Handle --ignore-pyproject and --profile options
+    if namespace.ignore_pyproject:
+        pyproject_config: dict[str, Any] = {}
+    else:
+        try:
+            pyproject_config = _get_pyproject_toml_config(Path.cwd(), profile=namespace.profile)
+        except Error as e:
+            print(e.message, file=sys.stderr)  # noqa: T201
+            return Exit.ERROR
+
+    if namespace.generate_cli_command:
+        if not pyproject_config:
+            print(  # noqa: T201
+                "No [tool.datamodel-codegen] section found in pyproject.toml",
+                file=sys.stderr,
+            )
+            return Exit.ERROR
+        command_output = generate_cli_command(pyproject_config)
+        print(command_output)  # noqa: T201
+        return Exit.OK
+
+    try:
+        config = Config.parse_obj(pyproject_config)
+        config.merge_args(namespace)
+    except Error as e:
+        print(e.message, file=sys.stderr)  # noqa: T201
+        return Exit.ERROR
+
+    if not config.input and not config.url and sys.stdin.isatty():
+        print(  # noqa: T201
+            "Not Found Input: require `stdin` or arguments `--input` or `--url`",
+            file=sys.stderr,
+        )
+        arg_parser.print_help()
+        return Exit.ERROR
+
+    if config.check and config.output is None:
+        print(  # noqa: T201
+            "Error: --check cannot be used with stdout output (no --output specified)",
+            file=sys.stderr,
+        )
+        return Exit.ERROR
+
+    if config.watch and config.check:
+        print(  # noqa: T201
+            "Error: --watch and --check cannot be used together",
+            file=sys.stderr,
+        )
+        return Exit.ERROR
+
+    if config.watch and (config.input is None or is_url(str(config.input))):
+        print(  # noqa: T201
+            "Error: --watch requires --input file path (not URL or stdin)",
+            file=sys.stderr,
+        )
+        return Exit.ERROR
+
+    if not is_supported_in_black(config.target_python_version):  # pragma: no cover
+        print(  # noqa: T201
+            f"Installed black doesn't support Python version {config.target_python_version.value}.\n"
+            f"You have to install a newer black.\n"
+            f"Installed black version: {_get_black().__version__}",
+            file=sys.stderr,
+        )
+        return Exit.ERROR
+
+    if config.debug:  # pragma: no cover
+        enable_debug_message()
+
+    if config.disable_warnings:
+        warnings.simplefilter("ignore")
+
+    if config.reuse_scope == ReuseScope.Tree and not config.reuse_model:
+        print(  # noqa: T201
+            "Warning: --reuse-scope=tree has no effect without --reuse-model",
+            file=sys.stderr,
+        )
+
+    if (
+        config.use_specialized_enum
+        and namespace.use_specialized_enum is not False  # CLI didn't disable it
+        and (namespace.use_specialized_enum is True or pyproject_config.get("use_specialized_enum") is True)
+        and not config.target_python_version.has_strenum
+    ):
+        print(  # noqa: T201
+            f"Error: --use-specialized-enum requires --target-python-version 3.11 or later.\n"
+            f"Current target version: {config.target_python_version.value}\n"
+            f"StrEnum is only available in Python 3.11+.",
+            file=sys.stderr,
+        )
+        return Exit.ERROR
+
+    extra_template_data: defaultdict[str, dict[str, Any]] | None
+    if config.extra_template_data is None:
+        extra_template_data = None
+    else:
+        with config.extra_template_data as data:
+            try:
+                extra_template_data = json.load(data, object_hook=lambda d: defaultdict(dict, **d))
+            except json.JSONDecodeError as e:
+                print(f"Unable to load extra template data: {e}", file=sys.stderr)  # noqa: T201
+                return Exit.ERROR
+
+    if config.aliases is None:
+        aliases = None
+    else:
+        with config.aliases as data:
+            try:
+                aliases = json.load(data)
+            except json.JSONDecodeError as e:
+                print(f"Unable to load alias mapping: {e}", file=sys.stderr)  # noqa: T201
+                return Exit.ERROR
+        if not isinstance(aliases, dict) or not all(
+            isinstance(k, str) and isinstance(v, str) for k, v in aliases.items()
+        ):
+            print(  # noqa: T201
+                'Alias mapping must be a JSON string mapping (e.g. {"from": "to", ...})',
+                file=sys.stderr,
+            )
+            return Exit.ERROR
+
+    if config.custom_formatters_kwargs is None:
+        custom_formatters_kwargs = None
+    else:
+        with config.custom_formatters_kwargs as data:
+            try:
+                custom_formatters_kwargs = json.load(data)
+            except json.JSONDecodeError as e:  # pragma: no cover
+                print(  # noqa: T201
+                    f"Unable to load custom_formatters_kwargs mapping: {e}",
+                    file=sys.stderr,
+                )
+                return Exit.ERROR
+        if not isinstance(custom_formatters_kwargs, dict) or not all(
+            isinstance(k, str) and isinstance(v, str) for k, v in custom_formatters_kwargs.items()
+        ):  # pragma: no cover
+            print(  # noqa: T201
+                'Custom formatters kwargs mapping must be a JSON string mapping (e.g. {"from": "to", ...})',
+                file=sys.stderr,
+            )
+            return Exit.ERROR
+
+    if config.check:
+        config_output = cast("Path", config.output)
+        is_directory_output = not config_output.suffix
+        temp_context: tempfile.TemporaryDirectory[str] | None = tempfile.TemporaryDirectory()
+        temp_dir = Path(temp_context.name)
+        if is_directory_output:
+            generate_output: Path | None = temp_dir / config_output.name
+        else:
+            generate_output = temp_dir / "output.py"
+    else:
+        temp_context = None
+        generate_output = config.output
+        is_directory_output = False
+
+    try:
+        run_generate_from_config(
+            config=config,
+            input_=config.url or config.input or sys.stdin.read(),
+            output=generate_output,
+            extra_template_data=extra_template_data,
+            aliases=aliases,
+            command_line=shlex.join(["datamodel-codegen", *args]) if config.enable_command_header else None,
+            custom_formatters_kwargs=custom_formatters_kwargs,
+            settings_path=config.output if config.check else None,
+        )
+    except InvalidClassNameError as e:
+        print(f"{e} You have to set `--class-name` option", file=sys.stderr)  # noqa: T201
+        if temp_context is not None:
+            temp_context.cleanup()
+        return Exit.ERROR
+    except Error as e:
+        print(str(e), file=sys.stderr)  # noqa: T201
+        if temp_context is not None:
+            temp_context.cleanup()
+        return Exit.ERROR
+    except Exception:  # noqa: BLE001
+        import traceback  # noqa: PLC0415
+
+        print(traceback.format_exc(), file=sys.stderr)  # noqa: T201
+        if temp_context is not None:
+            temp_context.cleanup()
+        return Exit.ERROR
+
+    if config.check and config.output is not None and generate_output is not None:
+        has_differences = False
+
+        if is_directory_output:
+            diffs, missing_files, extra_files = _compare_directories(generate_output, config.output, config.encoding)
+            if diffs:
+                print("".join(diffs), end="")  # noqa: T201
+                has_differences = True
+            for missing in missing_files:
+                print(f"MISSING: {missing} (should be generated)")  # noqa: T201
+                has_differences = True
+            for extra in extra_files:
+                print(f"EXTRA: {extra} (no longer generated)")  # noqa: T201
+                has_differences = True
+        else:
+            diff_found, diff_lines = _compare_single_file(generate_output, config.output, config.encoding)
+            if diff_found:
+                print("".join(diff_lines), end="")  # noqa: T201
+                has_differences = True
+
+        if temp_context is not None:  # pragma: no branch
+            temp_context.cleanup()
+
+        return Exit.DIFF if has_differences else Exit.OK
+
+    if config.watch:
+        try:
+            from datamodel_code_generator.watch import watch_and_regenerate  # noqa: PLC0415
+
+            return watch_and_regenerate(config, extra_template_data, aliases, custom_formatters_kwargs)
+        except Exception as e:  # noqa: BLE001
+            print(str(e), file=sys.stderr)  # noqa: T201
+            return Exit.ERROR
+
+    return Exit.OK
+
+
+if __name__ == "__main__":
+    sys.exit(main())
diff -pruN 0.26.4-3/src/datamodel_code_generator/arguments.py 0.45.0-1/src/datamodel_code_generator/arguments.py
--- 0.26.4-3/src/datamodel_code_generator/arguments.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/arguments.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,798 @@
+"""CLI argument definitions for datamodel-codegen.
+
+Defines the ArgumentParser and all command-line options organized into groups:
+base options, typing customization, field customization, model customization,
+template customization, OpenAPI-specific options, and general options.
+"""
+
+from __future__ import annotations
+
+import json
+import locale
+from argparse import ArgumentParser, ArgumentTypeError, BooleanOptionalAction, Namespace, RawDescriptionHelpFormatter
+from operator import attrgetter
+from pathlib import Path
+from typing import TYPE_CHECKING, cast
+
+from datamodel_code_generator import (
+    DEFAULT_SHARED_MODULE_NAME,
+    AllExportsCollisionStrategy,
+    AllExportsScope,
+    AllOfMergeMode,
+    DataclassArguments,
+    DataModelType,
+    InputFileType,
+    ModuleSplitMode,
+    OpenAPIScope,
+    ReadOnlyWriteOnlyModelType,
+    ReuseScope,
+)
+from datamodel_code_generator.format import DatetimeClassType, Formatter, PythonVersion
+from datamodel_code_generator.model.pydantic_v2 import UnionMode
+from datamodel_code_generator.parser import LiteralType
+from datamodel_code_generator.types import StrictTypes
+
+if TYPE_CHECKING:
+    from argparse import Action
+    from collections.abc import Iterable
+
+DEFAULT_ENCODING = locale.getpreferredencoding()
+
+namespace = Namespace(no_color=False)
+
+
+def _dataclass_arguments(value: str) -> DataclassArguments:
+    """Parse JSON string and validate it as DataclassArguments."""
+    try:
+        result = json.loads(value)
+    except json.JSONDecodeError as e:
+        msg = f"Invalid JSON: {e}"
+        raise ArgumentTypeError(msg) from e
+    if not isinstance(result, dict):
+        msg = f"Expected a JSON dictionary, got {type(result).__name__}"
+        raise ArgumentTypeError(msg)
+    valid_keys = set(DataclassArguments.__annotations__.keys())
+    invalid_keys = set(result.keys()) - valid_keys
+    if invalid_keys:
+        msg = f"Invalid keys: {invalid_keys}. Valid keys are: {valid_keys}"
+        raise ArgumentTypeError(msg)
+    for key, val in result.items():
+        if not isinstance(val, bool):
+            msg = f"Expected bool for '{key}', got {type(val).__name__}"
+            raise ArgumentTypeError(msg)
+    return cast("DataclassArguments", result)
+
+
+class SortingHelpFormatter(RawDescriptionHelpFormatter):
+    """Help formatter that sorts arguments, adds color to section headers, and preserves epilog formatting."""
+
+    def _bold_cyan(self, text: str) -> str:  # noqa: PLR6301
+        """Wrap text in ANSI bold cyan escape codes."""
+        return f"\x1b[36;1m{text}\x1b[0m"
+
+    def add_arguments(self, actions: Iterable[Action]) -> None:
+        """Add arguments sorted by option strings."""
+        actions = sorted(actions, key=attrgetter("option_strings"))
+        super().add_arguments(actions)
+
+    def start_section(self, heading: str | None) -> None:
+        """Start a section with optional colored heading."""
+        return super().start_section(heading if namespace.no_color or not heading else self._bold_cyan(heading))
+
+
+arg_parser = ArgumentParser(
+    usage="\n  datamodel-codegen [options]",
+    description="Generate Python data models from schema definitions or structured data\n\n"
+    "For detailed usage, see: https://koxudaxi.github.io/datamodel-code-generator",
+    epilog="Documentation: https://koxudaxi.github.io/datamodel-code-generator\n"
+    "GitHub: https://github.com/koxudaxi/datamodel-code-generator",
+    formatter_class=SortingHelpFormatter,
+    add_help=False,
+)
+
+base_options = arg_parser.add_argument_group("Options")
+typing_options = arg_parser.add_argument_group("Typing customization")
+field_options = arg_parser.add_argument_group("Field customization")
+model_options = arg_parser.add_argument_group("Model customization")
+extra_fields_model_options = model_options.add_mutually_exclusive_group()
+template_options = arg_parser.add_argument_group("Template customization")
+openapi_options = arg_parser.add_argument_group("OpenAPI-only options")
+general_options = arg_parser.add_argument_group("General options")
+
+# ======================================================================================
+# Base options for input/output
+# ======================================================================================
+base_options.add_argument(
+    "--http-headers",
+    nargs="+",
+    metavar="HTTP_HEADER",
+    help='Set headers in HTTP requests to the remote host. (example: "Authorization: Basic dXNlcjpwYXNz")',
+)
+base_options.add_argument(
+    "--http-query-parameters",
+    nargs="+",
+    metavar="HTTP_QUERY_PARAMETERS",
+    help='Set query parameters in HTTP requests to the remote host. (example: "ref=branch")',
+)
+base_options.add_argument(
+    "--http-ignore-tls",
+    help="Disable verification of the remote host's TLS certificate",
+    action="store_true",
+    default=None,
+)
+base_options.add_argument(
+    "--input",
+    help="Input file/directory (default: stdin)",
+)
+base_options.add_argument(
+    "--input-file-type",
+    help="Input file type (default: auto)",
+    choices=[i.value for i in InputFileType],
+)
+base_options.add_argument(
+    "--output",
+    help="Output file (default: stdout)",
+)
+base_options.add_argument(
+    "--output-model-type",
+    help="Output model type (default: pydantic.BaseModel)",
+    choices=[i.value for i in DataModelType],
+)
+base_options.add_argument(
+    "--url",
+    help="Input file URL. `--input` is ignored when `--url` is used",
+)
+
+# ======================================================================================
+# Customization options for generated models
+# ======================================================================================
+extra_fields_model_options.add_argument(
+    "--allow-extra-fields",
+    help="Deprecated: Allow passing extra fields. This flag is deprecated. Use `--extra-fields=allow` instead.",
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--allow-population-by-field-name",
+    help="Allow population by field name",
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--class-name",
+    help="Set class name of root model",
+    default=None,
+)
+model_options.add_argument(
+    "--collapse-root-models",
+    action="store_true",
+    default=None,
+    help="Models generated with a root-type field will be merged into the models using that root-type model",
+)
+model_options.add_argument(
+    "--skip-root-model",
+    action="store_true",
+    default=None,
+    help="Skip generating the model for the root schema element",
+)
+model_options.add_argument(
+    "--disable-appending-item-suffix",
+    help="Disable appending `Item` suffix to model name in an array",
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--disable-timestamp",
+    help="Disable timestamp on file headers",
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--enable-faux-immutability",
+    help="Enable faux immutability",
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--enable-version-header",
+    help="Enable package version on file headers",
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--enable-command-header",
+    help="Enable command-line options on file headers for reproducibility",
+    action="store_true",
+    default=None,
+)
+extra_fields_model_options.add_argument(
+    "--extra-fields",
+    help="Set the generated models to allow, forbid, or ignore extra fields.",
+    choices=["allow", "ignore", "forbid"],
+    default=None,
+)
+model_options.add_argument(
+    "--keep-model-order",
+    help="Keep generated models' order",
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--keyword-only",
+    help="Defined models as keyword only (for example dataclass(kw_only=True)).",
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--frozen-dataclasses",
+    help="Generate frozen dataclasses (dataclass(frozen=True)). Only applies to dataclass output.",
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--dataclass-arguments",
+    type=_dataclass_arguments,
+    default=None,
+    help=(
+        "Custom dataclass arguments as a JSON dictionary, "
+        'e.g. \'{"frozen": true, "kw_only": true}\'. '
+        "Overrides --frozen-dataclasses and similar flags."
+    ),
+)
+model_options.add_argument(
+    "--reuse-model",
+    help="Reuse models on the field when a module has the model with the same content",
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--reuse-scope",
+    help="Scope for model reuse deduplication: module (per-file, default) or tree (cross-file with shared module). "
+    "Only effective when --reuse-model is set.",
+    choices=[s.value for s in ReuseScope],
+    default=None,
+)
+model_options.add_argument(
+    "--shared-module-name",
+    help=f'Name of the shared module for --reuse-scope=tree (default: "{DEFAULT_SHARED_MODULE_NAME}"). '
+    f'Use this option if your schema has a file named "{DEFAULT_SHARED_MODULE_NAME}".',
+    default=None,
+)
+model_options.add_argument(
+    "--target-python-version",
+    help="target python version",
+    choices=[v.value for v in PythonVersion],
+)
+model_options.add_argument(
+    "--treat-dot-as-module",
+    help="treat dotted module names as modules",
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--use-schema-description",
+    help="Use schema description to populate class docstring",
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--use-title-as-name",
+    help="use titles as class names of models",
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--use-pendulum",
+    help="use pendulum instead of datetime",
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--use-exact-imports",
+    help='Import exact types instead of modules, for example: "from .foo import Bar" instead of '
+    '"from . import foo" with "foo.Bar"',
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--output-datetime-class",
+    help="Choose Datetime class between AwareDatetime, NaiveDatetime or datetime. "
+    "Each output model has its default mapping (for example pydantic: datetime, dataclass: str, ...)",
+    choices=[i.value for i in DatetimeClassType],
+    default=None,
+)
+model_options.add_argument(
+    "--parent-scoped-naming",
+    help="Set name of models defined inline from the parent model",
+    action="store_true",
+    default=None,
+)
+model_options.add_argument(
+    "--all-exports-scope",
+    help="Generate __all__ in __init__.py with re-exports. "
+    "'children': export from direct child modules only. "
+    "'recursive': export from all descendant modules.",
+    choices=[s.value for s in AllExportsScope],
+    default=None,
+)
+model_options.add_argument(
+    "--all-exports-collision-strategy",
+    help="Strategy for name collisions when using --all-exports-scope=recursive. "
+    "'error': raise an error (default). "
+    "'minimal-prefix': add module prefix only to colliding names. "
+    "'full-prefix': add full module path prefix to colliding names.",
+    choices=[s.value for s in AllExportsCollisionStrategy],
+    default=None,
+)
+model_options.add_argument(
+    "--module-split-mode",
+    help="Split generated models into separate files. 'single': generate one file per model class.",
+    choices=[m.value for m in ModuleSplitMode],
+    default=None,
+)
+
+# ======================================================================================
+# Typing options for generated models
+# ======================================================================================
+typing_options.add_argument(
+    "--base-class",
+    help="Base Class (default: pydantic.BaseModel)",
+    type=str,
+)
+typing_options.add_argument(
+    "--enum-field-as-literal",
+    help="Parse enum field as literal. "
+    "all: all enum field type are Literal. "
+    "one: field type is Literal when an enum has only one possible value",
+    choices=[lt.value for lt in LiteralType],
+    default=None,
+)
+typing_options.add_argument(
+    "--field-constraints",
+    help="Use field constraints and not con* annotations",
+    action="store_true",
+    default=None,
+)
+typing_options.add_argument(
+    "--set-default-enum-member",
+    help="Set enum members as default values for enum field",
+    action="store_true",
+    default=None,
+)
+typing_options.add_argument(
+    "--strict-types",
+    help="Use strict types",
+    choices=[t.value for t in StrictTypes],
+    nargs="+",
+)
+typing_options.add_argument(
+    "--use-annotated",
+    help="Use typing.Annotated for Field(). Also, `--field-constraints` option will be enabled.",
+    action="store_true",
+    default=None,
+)
+typing_options.add_argument(
+    "--use-serialize-as-any",
+    help="Use pydantic.SerializeAsAny for fields with types that have subtypes (Pydantic v2 only)",
+    action="store_true",
+    default=None,
+)
+typing_options.add_argument(
+    "--use-generic-container-types",
+    help="Use generic container types for type hinting (typing.Sequence, typing.Mapping). "
+    "If `--use-standard-collections` option is set, then import from collections.abc instead of typing",
+    action="store_true",
+    default=None,
+)
+typing_options.add_argument(
+    "--use-non-positive-negative-number-constrained-types",
+    help="Use the Non{Positive,Negative}{FloatInt} types instead of the corresponding con* constrained types.",
+    action="store_true",
+    default=None,
+)
+typing_options.add_argument(
+    "--use-decimal-for-multiple-of",
+    help="Use condecimal instead of confloat for float/number fields with multipleOf constraint "
+    "(Pydantic only). Avoids floating-point precision issues in validation.",
+    action="store_true",
+    default=None,
+)
+typing_options.add_argument(
+    "--use-one-literal-as-default",
+    help="Use one literal as default value for one literal field",
+    action="store_true",
+    default=None,
+)
+typing_options.add_argument(
+    "--use-enum-values-in-discriminator",
+    help="Use enum member literals in discriminator fields instead of string literals",
+    action="store_true",
+    default=None,
+)
+typing_options.add_argument(
+    "--use-standard-collections",
+    help="Use standard collections for type hinting (list, dict)",
+    action="store_true",
+    default=None,
+)
+typing_options.add_argument(
+    "--use-subclass-enum",
+    help="Define generic Enum class as subclass with field type when enum has type (int, float, bytes, str)",
+    action="store_true",
+    default=None,
+)
+typing_options.add_argument(
+    "--use-specialized-enum",
+    help="Use specialized Enum class (StrEnum, IntEnum). Requires --target-python-version 3.11+",
+    action=BooleanOptionalAction,
+    default=None,
+)
+typing_options.add_argument(
+    "--use-union-operator",
+    help="Use | operator for Union type (PEP 604).",
+    action="store_true",
+    default=None,
+)
+typing_options.add_argument(
+    "--use-unique-items-as-set",
+    help="define field type as `set` when the field attribute has `uniqueItems`",
+    action="store_true",
+    default=None,
+)
+typing_options.add_argument(
+    "--allof-merge-mode",
+    help="Mode for field merging in allOf schemas. "
+    "'constraints': merge only constraints (minItems, maxItems, pattern, etc.) from parent (default). "
+    "'all': merge constraints plus annotations (default, examples) from parent. "
+    "'none': do not merge any fields from parent properties.",
+    choices=[m.value for m in AllOfMergeMode],
+    default=None,
+)
+typing_options.add_argument(
+    "--use-type-alias",
+    help="Use TypeAlias instead of root models (experimental)",
+    action="store_true",
+    default=None,
+)
+typing_options.add_argument(
+    "--disable-future-imports",
+    help="Disable __future__ imports",
+    action="store_true",
+    default=None,
+)
+typing_options.add_argument(
+    "--type-mappings",
+    help="Override default type mappings. "
+    'Format: "type+format=target" (e.g., "string+binary=string" to map binary format to string type) '
+    'or "format=target" (e.g., "binary=string"). '
+    "Can be specified multiple times.",
+    nargs="+",
+    type=str,
+    default=None,
+)
+
+# ======================================================================================
+# Customization options for generated model fields
+# ======================================================================================
+field_options.add_argument(
+    "--capitalise-enum-members",
+    "--capitalize-enum-members",
+    help="Capitalize field names on enum",
+    action="store_true",
+    default=None,
+)
+field_options.add_argument(
+    "--empty-enum-field-name",
+    help="Set field name when enum value is empty (default:  `_`)",
+    default=None,
+)
+field_options.add_argument(
+    "--field-extra-keys",
+    help="Add extra keys to field parameters",
+    type=str,
+    nargs="+",
+)
+field_options.add_argument(
+    "--field-extra-keys-without-x-prefix",
+    help="Add extra keys with `x-` prefix to field parameters. The extra keys are stripped of the `x-` prefix.",
+    type=str,
+    nargs="+",
+)
+field_options.add_argument(
+    "--field-include-all-keys",
+    help="Add all keys to field parameters",
+    action="store_true",
+    default=None,
+)
+field_options.add_argument(
+    "--force-optional",
+    help="Force optional for required fields",
+    action="store_true",
+    default=None,
+)
+field_options.add_argument(
+    "--original-field-name-delimiter",
+    help="Set delimiter to convert to snake case. This option only can be used with --snake-case-field (default: `_` )",
+    default=None,
+)
+field_options.add_argument(
+    "--remove-special-field-name-prefix",
+    help="Remove field name prefix if it has a special meaning e.g. underscores",
+    action="store_true",
+    default=None,
+)
+field_options.add_argument(
+    "--snake-case-field",
+    help="Change camel-case field name to snake-case",
+    action="store_true",
+    default=None,
+)
+field_options.add_argument(
+    "--special-field-name-prefix",
+    help="Set field name prefix when first character can't be used as Python field name (default:  `field`)",
+    default=None,
+)
+field_options.add_argument(
+    "--strip-default-none",
+    help="Strip default None on fields",
+    action="store_true",
+    default=None,
+)
+field_options.add_argument(
+    "--use-default",
+    help="Use default value even if a field is required",
+    action="store_true",
+    default=None,
+)
+field_options.add_argument(
+    "--use-default-kwarg",
+    action="store_true",
+    help="Use `default=` instead of a positional argument for Fields that have default values.",
+    default=None,
+)
+field_options.add_argument(
+    "--use-field-description",
+    help="Use schema description to populate field docstring",
+    action="store_true",
+    default=None,
+)
+field_options.add_argument(
+    "--use-attribute-docstrings",
+    help="Set use_attribute_docstrings=True in Pydantic v2 ConfigDict",
+    action="store_true",
+    default=None,
+)
+field_options.add_argument(
+    "--use-inline-field-description",
+    help="Use schema description to populate field docstring as inline docstring",
+    action="store_true",
+    default=None,
+)
+field_options.add_argument(
+    "--union-mode",
+    help="Union mode for only pydantic v2 field",
+    choices=[u.value for u in UnionMode],
+    default=None,
+)
+field_options.add_argument(
+    "--no-alias",
+    help="""Do not add a field alias. E.g., if --snake-case-field is used along with a base class, which has an
+            alias_generator""",
+    action="store_true",
+    default=None,
+)
+field_options.add_argument(
+    "--use-frozen-field",
+    help="Use Field(frozen=True) for readOnly fields (Pydantic v2) or Field(allow_mutation=False) (Pydantic v1)",
+    action="store_true",
+    default=None,
+)
+
+# ======================================================================================
+# Options for templating output
+# ======================================================================================
+template_options.add_argument(
+    "--aliases",
+    help="Alias mapping file (JSON) for renaming fields. "
+    "Supports hierarchical formats: "
+    "Flat: {'field': 'alias'} applies to all occurrences. "
+    "Scoped: {'ClassName.field': 'alias'} applies to specific class. "
+    "Priority: scoped > flat. "
+    "Example: {'User.name': 'user_name', 'Address.name': 'addr_name', 'id': 'id_'}",
+    type=Path,
+)
+template_options.add_argument(
+    "--custom-file-header",
+    help="Custom file header",
+    type=str,
+    default=None,
+)
+template_options.add_argument(
+    "--custom-file-header-path",
+    help="Custom file header file path",
+    default=None,
+    type=str,
+)
+template_options.add_argument(
+    "--custom-template-dir",
+    help="Custom template directory",
+    type=str,
+)
+template_options.add_argument(
+    "--encoding",
+    help=f"The encoding of input and output (default: {DEFAULT_ENCODING})",
+    default=None,
+)
+template_options.add_argument(
+    "--extra-template-data",
+    help="Extra template data for output models. Input is supposed to be a json/yaml file. "
+    "For OpenAPI and Jsonschema the keys are the spec path of the object, or the name of the object if you want to "
+    "apply the template data to multiple objects with the same name. "
+    "If you are using another input file type (e.g. GraphQL), the key is the name of the object. "
+    "The value is a dictionary of the template data to add.",
+    type=Path,
+)
+template_options.add_argument(
+    "--use-double-quotes",
+    action="store_true",
+    default=None,
+    help="Model generated with double quotes. Single quotes or "
+    "your black config skip_string_normalization value will be used without this option.",
+)
+template_options.add_argument(
+    "--wrap-string-literal",
+    help="Wrap string literal by using black `experimental-string-processing` option (require black 20.8b0 or later)",
+    action="store_true",
+    default=None,
+)
+base_options.add_argument(
+    "--additional-imports",
+    help='Custom imports for output (comma-delimited list). For example "datetime.date,datetime.datetime"',
+    type=str,
+    default=None,
+)
+base_options.add_argument(
+    "--formatters",
+    help="Formatters for output (default: [black, isort])",
+    choices=[f.value for f in Formatter],
+    nargs="+",
+    default=None,
+)
+base_options.add_argument(
+    "--custom-formatters",
+    help="List of modules with custom formatter (delimited list input).",
+    type=str,
+    default=None,
+)
+template_options.add_argument(
+    "--custom-formatters-kwargs",
+    help="A file with kwargs for custom formatters.",
+    type=Path,
+)
+
+# ======================================================================================
+# Options specific to OpenAPI input schemas
+# ======================================================================================
+openapi_options.add_argument(
+    "--openapi-scopes",
+    help="Scopes of OpenAPI model generation (default: schemas)",
+    choices=[o.value for o in OpenAPIScope],
+    nargs="+",
+    default=None,
+)
+openapi_options.add_argument(
+    "--strict-nullable",
+    help="Treat default field as a non-nullable field (Only OpenAPI)",
+    action="store_true",
+    default=None,
+)
+openapi_options.add_argument(
+    "--use-operation-id-as-name",
+    help="use operation id of OpenAPI as class names of models",
+    action="store_true",
+    default=None,
+)
+openapi_options.add_argument(
+    "--include-path-parameters",
+    help="Include path parameters in generated parameter models in addition to query parameters (Only OpenAPI)",
+    action="store_true",
+    default=None,
+)
+openapi_options.add_argument(
+    "--validation",
+    help="Deprecated: Enable validation (Only OpenAPI). this option is deprecated. it will be removed in future "
+    "releases",
+    action="store_true",
+    default=None,
+)
+openapi_options.add_argument(
+    "--read-only-write-only-model-type",
+    help="Model generation for readOnly/writeOnly fields: "
+    "'request-response' = Request/Response models only (no base model), "
+    "'all' = Base + Request + Response models.",
+    choices=[e.value for e in ReadOnlyWriteOnlyModelType],
+    default=None,
+)
+
+# ======================================================================================
+# General options
+# ======================================================================================
+general_options.add_argument(
+    "--check",
+    action="store_true",
+    default=None,
+    help="Verify generated files are up-to-date without modifying them. "
+    "Exits with code 1 if differences found, 0 if up-to-date. "
+    "Useful for CI to ensure generated code is committed.",
+)
+general_options.add_argument(
+    "--debug",
+    help="show debug message (require \"debug\". `$ pip install 'datamodel-code-generator[debug]'`)",
+    action="store_true",
+    default=None,
+)
+general_options.add_argument(
+    "--disable-warnings",
+    help="disable warnings",
+    action="store_true",
+    default=None,
+)
+general_options.add_argument(
+    "-h",
+    "--help",
+    action="help",
+    default="==SUPPRESS==",
+    help="show this help message and exit",
+)
+general_options.add_argument(
+    "--no-color",
+    action="store_true",
+    default=False,
+    help="disable colorized output",
+)
+general_options.add_argument(
+    "--generate-pyproject-config",
+    action="store_true",
+    default=None,
+    help="Generate pyproject.toml configuration from the provided CLI arguments and exit",
+)
+general_options.add_argument(
+    "--generate-cli-command",
+    action="store_true",
+    default=None,
+    help="Generate CLI command from pyproject.toml configuration and exit",
+)
+general_options.add_argument(
+    "--ignore-pyproject",
+    action="store_true",
+    default=False,
+    help="Ignore pyproject.toml configuration",
+)
+general_options.add_argument(
+    "--profile",
+    help="Use a named profile from pyproject.toml [tool.datamodel-codegen.profiles.<name>]",
+    default=None,
+)
+general_options.add_argument(
+    "--watch",
+    action="store_true",
+    default=None,
+    help="Watch input file(s) for changes and regenerate output automatically",
+)
+general_options.add_argument(
+    "--watch-delay",
+    type=float,
+    default=None,
+    help="Debounce delay in seconds for watch mode (default: 0.5)",
+)
+general_options.add_argument(
+    "--version",
+    action="store_true",
+    help="show version",
+)
+
+__all__ = [
+    "DEFAULT_ENCODING",
+    "arg_parser",
+    "namespace",
+]
diff -pruN 0.26.4-3/src/datamodel_code_generator/cli_options.py 0.45.0-1/src/datamodel_code_generator/cli_options.py
--- 0.26.4-3/src/datamodel_code_generator/cli_options.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/cli_options.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,295 @@
+"""CLI option metadata for documentation.
+
+This module provides metadata for CLI options used in documentation generation.
+The argparse definitions in arguments.py remain the source of truth for CLI behavior.
+This module only adds documentation-specific metadata (category, since_version, etc.).
+
+Synchronization between this module and argparse is verified by tests in
+tests/cli_doc/test_cli_options_sync.py.
+"""
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from enum import Enum
+from functools import lru_cache
+
+
+class OptionCategory(str, Enum):
+    """Categories for organizing CLI options in documentation."""
+
+    BASE = "Base Options"
+    TYPING = "Typing Customization"
+    FIELD = "Field Customization"
+    MODEL = "Model Customization"
+    TEMPLATE = "Template Customization"
+    OPENAPI = "OpenAPI-only Options"
+    GENERAL = "General Options"
+
+
+@dataclass(frozen=True)
+class CLIOptionMeta:
+    """Documentation metadata for a CLI option.
+
+    This is NOT the argparse definition - it only contains documentation metadata.
+    The actual CLI behavior is defined in arguments.py.
+    """
+
+    name: str
+    category: OptionCategory
+    since_version: str | None = None
+    deprecated: bool = False
+    deprecated_message: str | None = None
+
+
+# Options with manual documentation (not auto-generated from tests)
+# These options have hand-written docs in docs/cli-reference/manual/
+MANUAL_DOCS: frozenset[str] = frozenset({
+    "--help",
+    "--version",
+    "--debug",
+    "--profile",
+    "--no-color",
+})
+
+# Backward compatibility alias
+EXCLUDED_FROM_DOCS = MANUAL_DOCS
+
+# Documentation metadata for CLI options
+# Sync is verified by tests/cli_doc/test_cli_options_sync.py
+CLI_OPTION_META: dict[str, CLIOptionMeta] = {
+    # ==========================================================================
+    # Base Options (Input/Output)
+    # ==========================================================================
+    "--input": CLIOptionMeta(name="--input", category=OptionCategory.BASE),
+    "--output": CLIOptionMeta(name="--output", category=OptionCategory.BASE),
+    "--url": CLIOptionMeta(name="--url", category=OptionCategory.BASE),
+    "--input-file-type": CLIOptionMeta(name="--input-file-type", category=OptionCategory.BASE),
+    "--encoding": CLIOptionMeta(name="--encoding", category=OptionCategory.BASE),
+    # ==========================================================================
+    # Model Customization
+    # ==========================================================================
+    "--output-model-type": CLIOptionMeta(name="--output-model-type", category=OptionCategory.MODEL),
+    "--target-python-version": CLIOptionMeta(name="--target-python-version", category=OptionCategory.MODEL),
+    "--base-class": CLIOptionMeta(name="--base-class", category=OptionCategory.MODEL),
+    "--class-name": CLIOptionMeta(name="--class-name", category=OptionCategory.MODEL),
+    "--frozen-dataclasses": CLIOptionMeta(name="--frozen-dataclasses", category=OptionCategory.MODEL),
+    "--keyword-only": CLIOptionMeta(name="--keyword-only", category=OptionCategory.MODEL),
+    "--reuse-model": CLIOptionMeta(name="--reuse-model", category=OptionCategory.MODEL),
+    "--reuse-scope": CLIOptionMeta(name="--reuse-scope", category=OptionCategory.MODEL),
+    "--collapse-root-models": CLIOptionMeta(name="--collapse-root-models", category=OptionCategory.MODEL),
+    "--keep-model-order": CLIOptionMeta(name="--keep-model-order", category=OptionCategory.MODEL),
+    "--allow-extra-fields": CLIOptionMeta(name="--allow-extra-fields", category=OptionCategory.MODEL),
+    "--allow-population-by-field-name": CLIOptionMeta(
+        name="--allow-population-by-field-name", category=OptionCategory.MODEL
+    ),
+    "--enable-faux-immutability": CLIOptionMeta(name="--enable-faux-immutability", category=OptionCategory.MODEL),
+    "--use-subclass-enum": CLIOptionMeta(name="--use-subclass-enum", category=OptionCategory.MODEL),
+    "--force-optional": CLIOptionMeta(name="--force-optional", category=OptionCategory.MODEL),
+    "--strict-nullable": CLIOptionMeta(name="--strict-nullable", category=OptionCategory.MODEL),
+    "--use-default": CLIOptionMeta(name="--use-default", category=OptionCategory.MODEL),
+    "--use-default-kwarg": CLIOptionMeta(name="--use-default-kwarg", category=OptionCategory.MODEL),
+    "--strip-default-none": CLIOptionMeta(name="--strip-default-none", category=OptionCategory.MODEL),
+    "--dataclass-arguments": CLIOptionMeta(name="--dataclass-arguments", category=OptionCategory.MODEL),
+    "--use-frozen-field": CLIOptionMeta(name="--use-frozen-field", category=OptionCategory.MODEL),
+    "--union-mode": CLIOptionMeta(name="--union-mode", category=OptionCategory.MODEL),
+    "--parent-scoped-naming": CLIOptionMeta(name="--parent-scoped-naming", category=OptionCategory.MODEL),
+    "--use-one-literal-as-default": CLIOptionMeta(name="--use-one-literal-as-default", category=OptionCategory.MODEL),
+    "--use-serialize-as-any": CLIOptionMeta(name="--use-serialize-as-any", category=OptionCategory.MODEL),
+    "--skip-root-model": CLIOptionMeta(name="--skip-root-model", category=OptionCategory.MODEL),
+    # ==========================================================================
+    # Field Customization
+    # ==========================================================================
+    "--snake-case-field": CLIOptionMeta(name="--snake-case-field", category=OptionCategory.FIELD),
+    "--original-field-name-delimiter": CLIOptionMeta(
+        name="--original-field-name-delimiter", category=OptionCategory.FIELD
+    ),
+    "--capitalize-enum-members": CLIOptionMeta(name="--capitalize-enum-members", category=OptionCategory.FIELD),
+    "--special-field-name-prefix": CLIOptionMeta(name="--special-field-name-prefix", category=OptionCategory.FIELD),
+    "--remove-special-field-name-prefix": CLIOptionMeta(
+        name="--remove-special-field-name-prefix", category=OptionCategory.FIELD
+    ),
+    "--empty-enum-field-name": CLIOptionMeta(name="--empty-enum-field-name", category=OptionCategory.FIELD),
+    "--set-default-enum-member": CLIOptionMeta(name="--set-default-enum-member", category=OptionCategory.FIELD),
+    "--aliases": CLIOptionMeta(name="--aliases", category=OptionCategory.FIELD),
+    "--no-alias": CLIOptionMeta(name="--no-alias", category=OptionCategory.FIELD),
+    "--use-title-as-name": CLIOptionMeta(name="--use-title-as-name", category=OptionCategory.FIELD),
+    "--use-schema-description": CLIOptionMeta(name="--use-schema-description", category=OptionCategory.FIELD),
+    "--use-field-description": CLIOptionMeta(name="--use-field-description", category=OptionCategory.FIELD),
+    "--use-attribute-docstrings": CLIOptionMeta(name="--use-attribute-docstrings", category=OptionCategory.FIELD),
+    "--use-inline-field-description": CLIOptionMeta(
+        name="--use-inline-field-description", category=OptionCategory.FIELD
+    ),
+    "--field-constraints": CLIOptionMeta(name="--field-constraints", category=OptionCategory.FIELD),
+    "--field-extra-keys": CLIOptionMeta(name="--field-extra-keys", category=OptionCategory.FIELD),
+    "--field-extra-keys-without-x-prefix": CLIOptionMeta(
+        name="--field-extra-keys-without-x-prefix", category=OptionCategory.FIELD
+    ),
+    "--field-include-all-keys": CLIOptionMeta(name="--field-include-all-keys", category=OptionCategory.FIELD),
+    "--extra-fields": CLIOptionMeta(name="--extra-fields", category=OptionCategory.FIELD),
+    "--use-enum-values-in-discriminator": CLIOptionMeta(
+        name="--use-enum-values-in-discriminator", category=OptionCategory.FIELD
+    ),
+    # ==========================================================================
+    # Typing Customization
+    # ==========================================================================
+    "--use-union-operator": CLIOptionMeta(name="--use-union-operator", category=OptionCategory.TYPING),
+    "--use-standard-collections": CLIOptionMeta(name="--use-standard-collections", category=OptionCategory.TYPING),
+    "--use-generic-container-types": CLIOptionMeta(
+        name="--use-generic-container-types", category=OptionCategory.TYPING
+    ),
+    "--use-annotated": CLIOptionMeta(name="--use-annotated", category=OptionCategory.TYPING),
+    "--use-type-alias": CLIOptionMeta(name="--use-type-alias", category=OptionCategory.TYPING),
+    "--strict-types": CLIOptionMeta(name="--strict-types", category=OptionCategory.TYPING),
+    "--enum-field-as-literal": CLIOptionMeta(name="--enum-field-as-literal", category=OptionCategory.TYPING),
+    "--disable-future-imports": CLIOptionMeta(name="--disable-future-imports", category=OptionCategory.TYPING),
+    "--use-pendulum": CLIOptionMeta(name="--use-pendulum", category=OptionCategory.TYPING),
+    "--output-datetime-class": CLIOptionMeta(name="--output-datetime-class", category=OptionCategory.TYPING),
+    "--use-decimal-for-multiple-of": CLIOptionMeta(
+        name="--use-decimal-for-multiple-of", category=OptionCategory.TYPING
+    ),
+    "--use-non-positive-negative-number-constrained-types": CLIOptionMeta(
+        name="--use-non-positive-negative-number-constrained-types", category=OptionCategory.TYPING
+    ),
+    "--use-unique-items-as-set": CLIOptionMeta(name="--use-unique-items-as-set", category=OptionCategory.TYPING),
+    "--type-mappings": CLIOptionMeta(name="--type-mappings", category=OptionCategory.TYPING),
+    "--no-use-specialized-enum": CLIOptionMeta(name="--no-use-specialized-enum", category=OptionCategory.TYPING),
+    "--allof-merge-mode": CLIOptionMeta(name="--allof-merge-mode", category=OptionCategory.TYPING),
+    # ==========================================================================
+    # Template Customization
+    # ==========================================================================
+    "--wrap-string-literal": CLIOptionMeta(name="--wrap-string-literal", category=OptionCategory.TEMPLATE),
+    "--custom-template-dir": CLIOptionMeta(name="--custom-template-dir", category=OptionCategory.TEMPLATE),
+    "--extra-template-data": CLIOptionMeta(name="--extra-template-data", category=OptionCategory.TEMPLATE),
+    "--custom-file-header": CLIOptionMeta(name="--custom-file-header", category=OptionCategory.TEMPLATE),
+    "--custom-file-header-path": CLIOptionMeta(name="--custom-file-header-path", category=OptionCategory.TEMPLATE),
+    "--additional-imports": CLIOptionMeta(name="--additional-imports", category=OptionCategory.TEMPLATE),
+    "--use-double-quotes": CLIOptionMeta(name="--use-double-quotes", category=OptionCategory.TEMPLATE),
+    "--use-exact-imports": CLIOptionMeta(name="--use-exact-imports", category=OptionCategory.TEMPLATE),
+    "--disable-appending-item-suffix": CLIOptionMeta(
+        name="--disable-appending-item-suffix", category=OptionCategory.TEMPLATE
+    ),
+    "--treat-dot-as-module": CLIOptionMeta(name="--treat-dot-as-module", category=OptionCategory.TEMPLATE),
+    "--disable-timestamp": CLIOptionMeta(name="--disable-timestamp", category=OptionCategory.TEMPLATE),
+    "--enable-version-header": CLIOptionMeta(name="--enable-version-header", category=OptionCategory.TEMPLATE),
+    "--enable-command-header": CLIOptionMeta(name="--enable-command-header", category=OptionCategory.TEMPLATE),
+    "--formatters": CLIOptionMeta(name="--formatters", category=OptionCategory.TEMPLATE),
+    "--custom-formatters": CLIOptionMeta(name="--custom-formatters", category=OptionCategory.TEMPLATE),
+    "--custom-formatters-kwargs": CLIOptionMeta(name="--custom-formatters-kwargs", category=OptionCategory.TEMPLATE),
+    # ==========================================================================
+    # OpenAPI-only Options
+    # ==========================================================================
+    "--openapi-scopes": CLIOptionMeta(name="--openapi-scopes", category=OptionCategory.OPENAPI),
+    "--use-operation-id-as-name": CLIOptionMeta(name="--use-operation-id-as-name", category=OptionCategory.OPENAPI),
+    "--read-only-write-only-model-type": CLIOptionMeta(
+        name="--read-only-write-only-model-type", category=OptionCategory.OPENAPI
+    ),
+    "--include-path-parameters": CLIOptionMeta(name="--include-path-parameters", category=OptionCategory.OPENAPI),
+    "--validation": CLIOptionMeta(
+        name="--validation",
+        category=OptionCategory.OPENAPI,
+        deprecated=True,
+        deprecated_message="Use --field-constraints instead",
+    ),
+    # ==========================================================================
+    # General Options
+    # ==========================================================================
+    "--check": CLIOptionMeta(name="--check", category=OptionCategory.GENERAL),
+    "--http-headers": CLIOptionMeta(name="--http-headers", category=OptionCategory.GENERAL),
+    "--http-ignore-tls": CLIOptionMeta(name="--http-ignore-tls", category=OptionCategory.GENERAL),
+    "--http-query-parameters": CLIOptionMeta(name="--http-query-parameters", category=OptionCategory.GENERAL),
+    "--ignore-pyproject": CLIOptionMeta(name="--ignore-pyproject", category=OptionCategory.GENERAL),
+    "--generate-cli-command": CLIOptionMeta(name="--generate-cli-command", category=OptionCategory.GENERAL),
+    "--generate-pyproject-config": CLIOptionMeta(name="--generate-pyproject-config", category=OptionCategory.GENERAL),
+    "--shared-module-name": CLIOptionMeta(name="--shared-module-name", category=OptionCategory.GENERAL),
+    "--all-exports-scope": CLIOptionMeta(name="--all-exports-scope", category=OptionCategory.GENERAL),
+    "--all-exports-collision-strategy": CLIOptionMeta(
+        name="--all-exports-collision-strategy", category=OptionCategory.GENERAL
+    ),
+    "--module-split-mode": CLIOptionMeta(name="--module-split-mode", category=OptionCategory.GENERAL),
+    "--disable-warnings": CLIOptionMeta(name="--disable-warnings", category=OptionCategory.GENERAL),
+    "--watch": CLIOptionMeta(name="--watch", category=OptionCategory.GENERAL),
+    "--watch-delay": CLIOptionMeta(name="--watch-delay", category=OptionCategory.GENERAL),
+}
+
+
+def _canonical_option_key(option: str) -> tuple[int, str]:
+    """Key function for determining canonical option.
+
+    Canonical option is determined by:
+    1. Longest option string (--help over -h)
+    2. Lexicographically last if same length (for stability)
+
+    This ensures deterministic canonical selection.
+    """
+    return (len(option), option)
+
+
+@lru_cache(maxsize=1)
+def _build_alias_map_from_argparse() -> dict[str, str]:
+    """Build alias -> canonical map from argparse (the source of truth).
+
+    The canonical option is the longest option string for each action.
+    If multiple options have the same length, the lexicographically last one is chosen.
+    """
+    from datamodel_code_generator.arguments import arg_parser as argument_parser  # noqa: PLC0415
+
+    alias_map: dict[str, str] = {}
+    for action in argument_parser._actions:  # noqa: SLF001
+        if not action.option_strings:
+            continue  # pragma: no cover
+        # Canonical = longest, then lexicographically last for stability
+        canonical = max(action.option_strings, key=_canonical_option_key)
+        for opt in action.option_strings:
+            alias_map[opt] = canonical
+    return alias_map
+
+
+def get_canonical_option(option: str) -> str:
+    """Normalize an option alias to its canonical form.
+
+    Uses argparse definitions as the source of truth.
+
+    Examples:
+        >>> get_canonical_option("-h")
+        '--help'
+        >>> get_canonical_option("--help")
+        '--help'
+    """
+    return _build_alias_map_from_argparse().get(option, option)
+
+
+@lru_cache(maxsize=1)
+def get_all_canonical_options() -> frozenset[str]:
+    """Get all canonical options from argparse."""
+    return frozenset(_build_alias_map_from_argparse().values())
+
+
+def is_manual_doc(option: str) -> bool:  # pragma: no cover
+    """Check if an option has manual documentation (not auto-generated)."""
+    canonical = get_canonical_option(option)
+    return canonical in MANUAL_DOCS
+
+
+# Backward compatibility alias
+def is_excluded_from_docs(option: str) -> bool:  # pragma: no cover
+    """Check if an option is excluded from auto-generated documentation.
+
+    Deprecated: Use is_manual_doc() instead.
+    """
+    return is_manual_doc(option)
+
+
+def get_option_meta(option: str) -> CLIOptionMeta | None:  # pragma: no cover
+    """Get documentation metadata for an option.
+
+    Always canonicalizes the option name before lookup.
+    If the option is not explicitly registered, returns a default entry
+    with General category (auto-categorization for new options).
+    """
+    canonical = get_canonical_option(option)
+    if canonical in CLI_OPTION_META:
+        return CLI_OPTION_META[canonical]
+    if canonical in get_all_canonical_options() and canonical not in EXCLUDED_FROM_DOCS:
+        return CLIOptionMeta(name=canonical, category=OptionCategory.GENERAL)
+    return None
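+
+
+# A minimal sketch of how these helpers compose (results assume the registrations
+# above and the argparse definitions in arguments.py):
+#
+#   >>> is_manual_doc("-h")  # canonicalized to --help, which is in MANUAL_DOCS
+#   True
+#   >>> get_option_meta("--help") is None  # manual docs are excluded here
+#   True
+#   >>> get_option_meta("--check").category
+#   <OptionCategory.GENERAL: 'General Options'>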
diff -pruN 0.26.4-3/src/datamodel_code_generator/format.py 0.45.0-1/src/datamodel_code_generator/format.py
--- 0.26.4-3/src/datamodel_code_generator/format.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/format.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,349 @@
+"""Code formatting utilities and Python version handling.
+
+Provides CodeFormatter for applying black, isort, and ruff formatting,
+along with PythonVersion enum and DatetimeClassType for output configuration.
+"""
+
+from __future__ import annotations
+
+import subprocess  # noqa: S404
+from enum import Enum
+from functools import cached_property, lru_cache
+from importlib import import_module
+from pathlib import Path
+from typing import TYPE_CHECKING, Any
+from warnings import warn
+
+from datamodel_code_generator.util import load_toml
+
+if TYPE_CHECKING:
+    from collections.abc import Sequence
+
+
+@lru_cache(maxsize=1)
+def _get_black() -> Any:
+    import black as _black  # noqa: PLC0415
+
+    return _black
+
+
+@lru_cache(maxsize=1)
+def _get_black_mode() -> Any:  # pragma: no cover
+    black = _get_black()
+    try:
+        import black.mode  # noqa: PLC0415
+    except ImportError:
+        return None
+    else:
+        return black.mode
+
+
+@lru_cache(maxsize=1)
+def _get_isort() -> Any:
+    import isort as _isort  # noqa: PLC0415
+
+    return _isort
+
+
+class DatetimeClassType(Enum):
+    """Output datetime class type options."""
+
+    Datetime = "datetime"
+    Awaredatetime = "AwareDatetime"
+    Naivedatetime = "NaiveDatetime"
+
+
+class PythonVersion(Enum):
+    """Supported Python version targets for code generation."""
+
+    PY_39 = "3.9"
+    PY_310 = "3.10"
+    PY_311 = "3.11"
+    PY_312 = "3.12"
+    PY_313 = "3.13"
+    PY_314 = "3.14"
+
+    @cached_property
+    def _is_py_310_or_later(self) -> bool:  # pragma: no cover
+        return self.value != self.PY_39.value
+
+    @cached_property
+    def _is_py_311_or_later(self) -> bool:  # pragma: no cover
+        return self.value not in {self.PY_39.value, self.PY_310.value}
+
+    @cached_property
+    def _is_py_312_or_later(self) -> bool:  # pragma: no cover
+        return self.value not in {self.PY_39.value, self.PY_310.value, self.PY_311.value}
+
+    @cached_property
+    def _is_py_314_or_later(self) -> bool:
+        return self.value not in {
+            self.PY_39.value,
+            self.PY_310.value,
+            self.PY_311.value,
+            self.PY_312.value,
+            self.PY_313.value,
+        }
+
+    @property
+    def has_union_operator(self) -> bool:  # pragma: no cover
+        """Check if Python version supports the union operator (|)."""
+        return self._is_py_310_or_later
+
+    @property
+    def has_typed_dict_non_required(self) -> bool:
+        """Check if Python version supports TypedDict NotRequired."""
+        return self._is_py_311_or_later
+
+    @property
+    def has_kw_only_dataclass(self) -> bool:
+        """Check if Python version supports kw_only in dataclasses."""
+        return self._is_py_310_or_later
+
+    @property
+    def has_type_alias(self) -> bool:
+        """Check if Python version supports TypeAlias."""
+        return self._is_py_310_or_later
+
+    @property
+    def has_type_statement(self) -> bool:
+        """Check if Python version supports type statements."""
+        return self._is_py_312_or_later
+
+    @property
+    def has_native_deferred_annotations(self) -> bool:
+        """Check if Python version has native deferred annotations (Python 3.14+)."""
+        return self._is_py_314_or_later
+
+    @property
+    def has_strenum(self) -> bool:
+        """Check if Python version supports StrEnum."""
+        return self._is_py_311_or_later
+
+
+PythonVersionMin = PythonVersion.PY_39
+
+
+@lru_cache(maxsize=1)
+def _get_black_python_version_map() -> dict[PythonVersion, Any]:
+    black = _get_black()
+    return {
+        v: getattr(black.TargetVersion, f"PY{v.name.split('_')[-1]}")
+        for v in PythonVersion
+        if hasattr(black.TargetVersion, f"PY{v.name.split('_')[-1]}")
+    }
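+
+# For illustration: the map above turns PythonVersion.PY_310 (name "PY_310") into
+# black.TargetVersion.PY310, silently skipping targets the installed black lacks.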
+
+
+def is_supported_in_black(python_version: PythonVersion) -> bool:  # pragma: no cover
+    """Check if a Python version is supported by the installed black version."""
+    return python_version in _get_black_python_version_map()
+
+
+def black_find_project_root(sources: Sequence[Path]) -> Path:
+    """Find the project root directory for black configuration."""
+    from black import find_project_root as _find_project_root  # noqa: PLC0415
+
+    project_root = _find_project_root(tuple(str(s) for s in sources))
+    if isinstance(project_root, tuple):
+        return project_root[0]
+    return project_root  # pragma: no cover
+
+
+class Formatter(Enum):
+    """Available code formatters for generated output."""
+
+    BLACK = "black"
+    ISORT = "isort"
+    RUFF_CHECK = "ruff-check"
+    RUFF_FORMAT = "ruff-format"
+
+
+DEFAULT_FORMATTERS = [Formatter.BLACK, Formatter.ISORT]
+
+
+class CodeFormatter:
+    """Formats generated code using black, isort, ruff, and custom formatters."""
+
+    def __init__(  # noqa: PLR0912, PLR0913, PLR0917
+        self,
+        python_version: PythonVersion,
+        settings_path: Path | None = None,
+        wrap_string_literal: bool | None = None,  # noqa: FBT001
+        skip_string_normalization: bool = True,  # noqa: FBT001, FBT002
+        known_third_party: list[str] | None = None,
+        custom_formatters: list[str] | None = None,
+        custom_formatters_kwargs: dict[str, Any] | None = None,
+        encoding: str = "utf-8",
+        formatters: list[Formatter] = DEFAULT_FORMATTERS,
+    ) -> None:
+        """Initialize code formatter with configuration for black, isort, ruff, and custom formatters."""
+        if not settings_path:
+            settings_path = Path.cwd()
+        elif settings_path.is_file():
+            settings_path = settings_path.parent
+        elif not settings_path.exists():
+            for parent in settings_path.parents:
+                if parent.exists():
+                    settings_path = parent
+                    break
+            else:
+                settings_path = Path.cwd()  # pragma: no cover
+
+        root = black_find_project_root((settings_path,))
+        path = root / "pyproject.toml"
+        if path.is_file():
+            pyproject_toml = load_toml(path)
+            config = pyproject_toml.get("tool", {}).get("black", {})
+        else:
+            config = {}
+
+        black = _get_black()
+        black_mode = _get_black_mode()
+        isort = _get_isort()
+
+        black_kwargs: dict[str, Any] = {}
+        if wrap_string_literal is not None:
+            experimental_string_processing = wrap_string_literal
+        elif black.__version__ < "24.1.0":
+            experimental_string_processing = config.get("experimental-string-processing")
+        else:
+            experimental_string_processing = config.get("preview", False) and (  # pragma: no cover
+                config.get("unstable", False) or "string_processing" in config.get("enable-unstable-feature", [])
+            )
+
+        if experimental_string_processing is not None:  # pragma: no cover
+            if black.__version__.startswith("19."):
+                warn(
+                    f"black doesn't support `experimental-string-processing` option"
+                    f" for wrapping string literal in {black.__version__}",
+                    stacklevel=2,
+                )
+            elif black.__version__ < "24.1.0":
+                black_kwargs["experimental_string_processing"] = experimental_string_processing
+            elif experimental_string_processing:
+                black_kwargs["preview"] = True
+                black_kwargs["unstable"] = config.get("unstable", False)
+                black_kwargs["enabled_features"] = {black_mode.Preview.string_processing}
+
+        self.black_mode = black.FileMode(
+            target_versions={_get_black_python_version_map()[python_version]},
+            line_length=config.get("line-length", black.DEFAULT_LINE_LENGTH),
+            string_normalization=not skip_string_normalization or not config.get("skip-string-normalization", True),
+            **black_kwargs,
+        )
+
+        self.settings_path: str = str(settings_path)
+
+        self.isort_config_kwargs: dict[str, Any] = {}
+        if known_third_party:
+            self.isort_config_kwargs["known_third_party"] = known_third_party
+
+        if isort.__version__.startswith("4."):  # pragma: no cover
+            self.isort_config = None
+        else:
+            self.isort_config = isort.Config(settings_path=self.settings_path, **self.isort_config_kwargs)
+
+        self.custom_formatters_kwargs = custom_formatters_kwargs or {}
+        self.custom_formatters = self._check_custom_formatters(custom_formatters)
+        self.encoding = encoding
+        self.formatters = formatters
+
+    def _load_custom_formatter(self, custom_formatter_import: str) -> CustomCodeFormatter:
+        """Load and instantiate a custom formatter from a module path."""
+        import_ = import_module(custom_formatter_import)
+
+        if not hasattr(import_, "CodeFormatter"):
+            msg = f"Custom formatter module `{import_.__name__}` must contains object with name CodeFormatter"
+            raise NameError(msg)
+
+        formatter_class = import_.__getattribute__("CodeFormatter")  # noqa: PLC2801
+
+        if not issubclass(formatter_class, CustomCodeFormatter):
+            msg = f"The custom module {custom_formatter_import} must inherit from `datamodel-code-generator`"
+            raise TypeError(msg)
+
+        return formatter_class(formatter_kwargs=self.custom_formatters_kwargs)
+
+    def _check_custom_formatters(self, custom_formatters: list[str] | None) -> list[CustomCodeFormatter]:
+        """Validate and load all custom formatters."""
+        if custom_formatters is None:
+            return []
+
+        return [self._load_custom_formatter(custom_formatter_import) for custom_formatter_import in custom_formatters]
+
+    def format_code(
+        self,
+        code: str,
+    ) -> str:
+        """Apply all configured formatters to the code string."""
+        if Formatter.ISORT in self.formatters:
+            code = self.apply_isort(code)
+        if Formatter.BLACK in self.formatters:
+            code = self.apply_black(code)
+
+        if Formatter.RUFF_CHECK in self.formatters:
+            code = self.apply_ruff_lint(code)
+
+        if Formatter.RUFF_FORMAT in self.formatters:
+            code = self.apply_ruff_formatter(code)
+
+        for formatter in self.custom_formatters:
+            code = formatter.apply(code)
+
+        return code
+
+    def apply_black(self, code: str) -> str:
+        """Format code using black."""
+        black = _get_black()
+        return black.format_str(
+            code,
+            mode=self.black_mode,
+        )
+
+    def apply_ruff_lint(self, code: str) -> str:
+        """Run ruff check with auto-fix on code."""
+        result = subprocess.run(
+            ("ruff", "check", "--fix", "-"),
+            input=code.encode(self.encoding),
+            capture_output=True,
+            check=False,
+            cwd=self.settings_path,
+        )
+        return result.stdout.decode(self.encoding)
+
+    def apply_ruff_formatter(self, code: str) -> str:
+        """Format code using ruff format."""
+        result = subprocess.run(
+            ("ruff", "format", "-"),
+            input=code.encode(self.encoding),
+            capture_output=True,
+            check=False,
+            cwd=self.settings_path,
+        )
+        return result.stdout.decode(self.encoding)
+
+    def apply_isort(self, code: str) -> str:
+        """Sort imports using isort."""
+        isort = _get_isort()
+        if self.isort_config is None:  # pragma: no cover
+            return isort.SortImports(
+                file_contents=code,
+                settings_path=self.settings_path,
+                **self.isort_config_kwargs,
+            ).output
+        return isort.code(code, config=self.isort_config)
+
+
+class CustomCodeFormatter:
+    """Base class for custom code formatters.
+
+    Subclasses must implement the apply() method to transform code.
+    """
+
+    def __init__(self, formatter_kwargs: dict[str, Any]) -> None:
+        """Initialize custom formatter with optional keyword arguments."""
+        self.formatter_kwargs = formatter_kwargs
+
+    def apply(self, code: str) -> str:
+        """Apply formatting to code. Must be implemented by subclasses."""
+        raise NotImplementedError
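+
+
+# A minimal sketch of a custom formatter module, loadable via --custom-formatters
+# (the module name `my_formatters` and the `header` kwarg are hypothetical):
+#
+#   # my_formatters.py
+#   from datamodel_code_generator.format import CustomCodeFormatter
+#
+#   class CodeFormatter(CustomCodeFormatter):
+#       """Prepend a header taken from --custom-formatters-kwargs."""
+#
+#       def apply(self, code: str) -> str:
+#           header = self.formatter_kwargs.get("header", "# generated")
+#           return f"{header}\n{code}"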
diff -pruN 0.26.4-3/src/datamodel_code_generator/http.py 0.45.0-1/src/datamodel_code_generator/http.py
--- 0.26.4-3/src/datamodel_code_generator/http.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/http.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,91 @@
+"""HTTP utilities for fetching remote schema files.
+
+Provides functions to fetch schema content from URLs and join URL references.
+HTTP(S) URLs require the 'http' extra: `pip install 'datamodel-code-generator[http]'`.
+file:// URLs are handled without additional dependencies.
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+if TYPE_CHECKING:
+    from collections.abc import Sequence
+
+
+def _get_httpx() -> Any:
+    """Lazily import httpx, raising a helpful error if not installed."""
+    try:
+        import httpx  # noqa: PLC0415
+    except ImportError as exc:  # pragma: no cover
+        msg = "Please run `$pip install 'datamodel-code-generator[http]`' to resolve HTTP(S) URL references"
+        raise Exception(msg) from exc  # noqa: TRY002
+    return httpx
+
+
+def get_body(
+    url: str,
+    headers: Sequence[tuple[str, str]] | None = None,
+    ignore_tls: bool = False,  # noqa: FBT001, FBT002
+    query_parameters: Sequence[tuple[str, str]] | None = None,
+) -> str:
+    """Fetch content from a URL with optional headers and query parameters."""
+    httpx = _get_httpx()
+    return httpx.get(
+        url,
+        headers=headers,
+        verify=not ignore_tls,
+        follow_redirects=True,
+        params=query_parameters,  # pyright: ignore[reportArgumentType]
+        # TODO: Improve params type
+    ).text
+
+
+def join_url(url: str, ref: str = ".") -> str:  # noqa: PLR0912
+    """Join a base URL with a relative reference."""
+    if url.startswith("file://"):
+        from urllib.parse import urlparse  # noqa: PLC0415
+
+        parsed = urlparse(url)
+
+        if ref.startswith("file://"):
+            return ref
+
+        ref_path, *frag = ref.split("#", 1)
+
+        # Fragment-only ref: keep the original path
+        if not ref_path:
+            joined = url.split("#", maxsplit=1)[0]
+            if frag:
+                joined += f"#{frag[0]}"
+            return joined
+
+        if ref_path.startswith("/"):
+            joined_path = ref_path
+        else:
+            base_segments = parsed.path.lstrip("/").split("/")
+            if base_segments and not base_segments[0]:
+                base_segments = []
+            if base_segments:
+                base_segments = base_segments[:-1]
+
+            min_depth = 1 if parsed.netloc else 0
+            for segment in ref_path.split("/"):
+                if segment in {"", "."}:
+                    continue
+                if segment == "..":
+                    if len(base_segments) > min_depth:
+                        base_segments.pop()
+                    continue
+                base_segments.append(segment)
+
+            joined_path = "/" + "/".join(base_segments)
+            if ref_path.endswith("/"):
+                joined_path += "/"
+
+        joined = f"file://{parsed.netloc}{joined_path}"
+        if frag:
+            joined += f"#{frag[0]}"
+        return joined
+    httpx = _get_httpx()
+    return str(httpx.URL(url).join(ref))
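+
+
+# For illustration, under the joining rules above (paths are hypothetical):
+#
+#   join_url("file:///specs/openapi.json", "common/defs.json")
+#   # -> "file:///specs/common/defs.json"
+#   join_url("file:///specs/openapi.json", "../shared/defs.json")
+#   # -> "file:///shared/defs.json"
+#   join_url("file:///specs/openapi.json", "#/components/schemas/Pet")
+#   # -> "file:///specs/openapi.json#/components/schemas/Pet"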
diff -pruN 0.26.4-3/src/datamodel_code_generator/imports.py 0.45.0-1/src/datamodel_code_generator/imports.py
--- 0.26.4-3/src/datamodel_code_generator/imports.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/imports.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,188 @@
+"""Import management system for generated code.
+
+Provides Import and Imports classes to track, organize, and render
+Python import statements for generated data models.
+"""
+
+from __future__ import annotations
+
+from collections import defaultdict
+from functools import lru_cache
+from itertools import starmap
+from typing import TYPE_CHECKING, Optional
+
+from datamodel_code_generator.util import BaseModel
+
+if TYPE_CHECKING:
+    from collections.abc import Iterable
+
+
+class Import(BaseModel):
+    """Represents a single Python import statement."""
+
+    from_: Optional[str] = None  # noqa: UP045
+    import_: str
+    alias: Optional[str] = None  # noqa: UP045
+    reference_path: Optional[str] = None  # noqa: UP045
+
+    @property
+    def is_future(self) -> bool:
+        """Check if this is a __future__ import."""
+        return self.from_ == "__future__"
+
+    @classmethod
+    @lru_cache
+    def from_full_path(cls, class_path: str) -> Import:
+        """Create an Import from a fully qualified path (e.g., 'typing.Optional')."""
+        split_class_path: list[str] = class_path.split(".")
+        return Import(from_=".".join(split_class_path[:-1]) or None, import_=split_class_path[-1])
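+
+    # For illustration: Import.from_full_path("typing.Optional") yields
+    # Import(from_="typing", import_="Optional"); a bare module such as "sys"
+    # yields Import(from_=None, import_="sys").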
+
+
+class Imports(defaultdict[Optional[str], set[str]]):
+    """Collection of imports with reference counting and alias support."""
+
+    def __str__(self) -> str:
+        """Return formatted import statements."""
+        return self.dump()
+
+    def __init__(self, use_exact: bool = False) -> None:  # noqa: FBT001, FBT002
+        """Initialize empty import collection."""
+        super().__init__(set)
+        self.alias: defaultdict[str | None, dict[str, str]] = defaultdict(dict)
+        self.counter: dict[tuple[str | None, str], int] = defaultdict(int)
+        self.reference_paths: dict[str, Import] = {}
+        self.use_exact: bool = use_exact
+        self._exports: set[str] | None = None
+
+    def _set_alias(self, from_: str | None, imports: set[str]) -> list[str]:
+        """Apply aliases to imports and return sorted list."""
+        return [
+            f"{i} as {self.alias[from_][i]}" if i in self.alias[from_] and i != self.alias[from_][i] else i
+            for i in sorted(imports)
+        ]
+
+    def create_line(self, from_: str | None, imports: set[str]) -> str:
+        """Create a single import line from module and names."""
+        if from_:
+            return f"from {from_} import {', '.join(self._set_alias(from_, imports))}"
+        return "\n".join(f"import {i}" for i in self._set_alias(from_, imports))
+
+    def dump(self) -> str:
+        """Render all imports as a string."""
+        return "\n".join(starmap(self.create_line, self.items()))
+
+    def append(self, imports: Import | Iterable[Import] | None) -> None:
+        """Add one or more imports to the collection."""
+        if imports:
+            if isinstance(imports, Import):
+                imports = [imports]
+            for import_ in imports:
+                if import_.reference_path:
+                    self.reference_paths[import_.reference_path] = import_
+                if "." in import_.import_:
+                    self[None].add(import_.import_)
+                    self.counter[None, import_.import_] += 1
+                else:
+                    self[import_.from_].add(import_.import_)
+                    self.counter[import_.from_, import_.import_] += 1
+                    if import_.alias:
+                        self.alias[import_.from_][import_.import_] = import_.alias
+
+    def remove(self, imports: Import | Iterable[Import]) -> None:
+        """Remove one or more imports from the collection."""
+        if isinstance(imports, Import):  # pragma: no cover
+            imports = [imports]
+        for import_ in imports:
+            if "." in import_.import_:  # pragma: no cover
+                self.counter[None, import_.import_] -= 1
+                if self.counter[None, import_.import_] == 0:  # pragma: no cover
+                    self[None].remove(import_.import_)
+                    if not self[None]:
+                        del self[None]
+            else:
+                self.counter[import_.from_, import_.import_] -= 1  # pragma: no cover
+                if self.counter[import_.from_, import_.import_] == 0:  # pragma: no cover
+                    self[import_.from_].remove(import_.import_)
+                    if not self[import_.from_]:
+                        del self[import_.from_]
+                    if import_.alias:  # pragma: no cover
+                        del self.alias[import_.from_][import_.import_]
+                        if not self.alias[import_.from_]:
+                            del self.alias[import_.from_]
+
+    def remove_referenced_imports(self, reference_path: str) -> None:
+        """Remove imports associated with a reference path."""
+        if reference_path in self.reference_paths:
+            self.remove(self.reference_paths[reference_path])
+
+    def extract_future(self) -> Imports:
+        """Extract and remove __future__ imports, returning them as a new Imports."""
+        future = Imports(self.use_exact)
+        future_key = "__future__"
+        if future_key in self:
+            future[future_key] = self.pop(future_key)
+            for key in list(self.counter.keys()):
+                if key[0] == future_key:
+                    future.counter[key] = self.counter.pop(key)
+            if future_key in self.alias:
+                future.alias[future_key] = self.alias.pop(future_key)
+        return future
+
+    def add_export(self, name: str) -> None:
+        """Add a name to export without importing it (for local definitions)."""
+        if self._exports is None:
+            self._exports = set()
+        self._exports.add(name)
+
+    def dump_all(self, *, multiline: bool = False) -> str:
+        """Generate __all__ declaration from imported names and added exports.
+
+        Args:
+            multiline: If True, format with one name per line
+
+        Returns:
+            Formatted __all__ = [...] string
+        """
+        name_set: set[str] = (self._exports or set()).copy()
+        for from_, imports in self.items():
+            name_set.update(self.alias.get(from_, {}).get(import_) or import_ for import_ in imports)
+        name_list = sorted(name_set)
+        if multiline:
+            items = ",\n    ".join(f'"{name}"' for name in name_list)
+            return f"__all__ = [\n    {items},\n]"
+        items = ", ".join(f'"{name}"' for name in name_list)
+        return f"__all__ = [{items}]"
+
+
+IMPORT_ANNOTATED = Import.from_full_path("typing.Annotated")
+IMPORT_ANY = Import.from_full_path("typing.Any")
+IMPORT_LIST = Import.from_full_path("typing.List")
+IMPORT_SET = Import.from_full_path("typing.Set")
+IMPORT_UNION = Import.from_full_path("typing.Union")
+IMPORT_OPTIONAL = Import.from_full_path("typing.Optional")
+IMPORT_LITERAL = Import.from_full_path("typing.Literal")
+IMPORT_TYPE_ALIAS = Import.from_full_path("typing.TypeAlias")
+IMPORT_TYPE_ALIAS_BACKPORT = Import.from_full_path("typing_extensions.TypeAlias")
+IMPORT_TYPE_ALIAS_TYPE = Import.from_full_path("typing_extensions.TypeAliasType")
+IMPORT_SEQUENCE = Import.from_full_path("typing.Sequence")
+IMPORT_FROZEN_SET = Import.from_full_path("typing.FrozenSet")
+IMPORT_MAPPING = Import.from_full_path("typing.Mapping")
+IMPORT_ABC_SEQUENCE = Import.from_full_path("collections.abc.Sequence")
+IMPORT_ABC_SET = Import.from_full_path("collections.abc.Set")
+IMPORT_ABC_MAPPING = Import.from_full_path("collections.abc.Mapping")
+IMPORT_ENUM = Import.from_full_path("enum.Enum")
+IMPORT_STR_ENUM = Import.from_full_path("enum.StrEnum")
+IMPORT_INT_ENUM = Import.from_full_path("enum.IntEnum")
+IMPORT_ANNOTATIONS = Import.from_full_path("__future__.annotations")
+IMPORT_DICT = Import.from_full_path("typing.Dict")
+IMPORT_DECIMAL = Import.from_full_path("decimal.Decimal")
+IMPORT_DATE = Import.from_full_path("datetime.date")
+IMPORT_DATETIME = Import.from_full_path("datetime.datetime")
+IMPORT_TIMEDELTA = Import.from_full_path("datetime.timedelta")
+IMPORT_PATH = Import.from_full_path("pathlib.Path")
+IMPORT_TIME = Import.from_full_path("datetime.time")
+IMPORT_UUID = Import.from_full_path("uuid.UUID")
+IMPORT_PENDULUM_DATE = Import.from_full_path("pendulum.Date")
+IMPORT_PENDULUM_DATETIME = Import.from_full_path("pendulum.DateTime")
+IMPORT_PENDULUM_DURATION = Import.from_full_path("pendulum.Duration")
+IMPORT_PENDULUM_TIME = Import.from_full_path("pendulum.Time")
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/__init__.py 0.45.0-1/src/datamodel_code_generator/model/__init__.py
--- 0.26.4-3/src/datamodel_code_generator/model/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,140 @@
+"""Model generation module.
+
+Provides factory functions and classes for generating different output formats
+(Pydantic, dataclasses, TypedDict, msgspec) based on configuration.
+"""
+
+from __future__ import annotations
+
+import sys
+from typing import TYPE_CHECKING, Callable, NamedTuple
+
+from datamodel_code_generator import PythonVersion
+
+from .base import ConstraintsBase, DataModel, DataModelFieldBase
+
+if TYPE_CHECKING:
+    from collections.abc import Iterable
+
+    from datamodel_code_generator import DataModelType
+    from datamodel_code_generator.types import DataTypeManager as DataTypeManagerABC
+
+DEFAULT_TARGET_PYTHON_VERSION = PythonVersion(f"{sys.version_info.major}.{sys.version_info.minor}")
+
+
+class DataModelSet(NamedTuple):
+    """Collection of model types needed for a specific output format."""
+
+    data_model: type[DataModel]
+    root_model: type[DataModel]
+    field_model: type[DataModelFieldBase]
+    data_type_manager: type[DataTypeManagerABC]
+    dump_resolve_reference_action: Callable[[Iterable[str]], str] | None
+    scalar_model: type[DataModel]
+    union_model: type[DataModel]
+    known_third_party: list[str] | None = None
+
+
+def get_data_model_types(
+    data_model_type: DataModelType,
+    target_python_version: PythonVersion = DEFAULT_TARGET_PYTHON_VERSION,
+    use_type_alias: bool = False,  # noqa: FBT001, FBT002
+) -> DataModelSet:
+    """Get the appropriate model types for the given output format and Python version."""
+    from datamodel_code_generator import DataModelType  # noqa: PLC0415
+
+    from . import (  # noqa: PLC0415
+        dataclass,
+        msgspec,
+        pydantic,
+        pydantic_v2,
+        scalar,
+        type_alias,
+        typed_dict,
+        union,
+    )
+    from .types import DataTypeManager  # noqa: PLC0415
+
+    # Pydantic v2 requires TypeAliasType; other output types use TypeAlias for better compatibility
+    if data_model_type == DataModelType.PydanticV2BaseModel:
+        if target_python_version.has_type_statement:
+            type_alias_class = type_alias.TypeStatement
+            scalar_class = scalar.DataTypeScalarTypeStatement
+            union_class = union.DataTypeUnionTypeStatement
+        else:
+            type_alias_class = type_alias.TypeAliasTypeBackport
+            scalar_class = scalar.DataTypeScalarTypeBackport
+            union_class = union.DataTypeUnionTypeBackport
+    elif target_python_version.has_type_statement:
+        type_alias_class = type_alias.TypeStatement
+        scalar_class = scalar.DataTypeScalarTypeStatement
+        union_class = union.DataTypeUnionTypeStatement
+    elif target_python_version.has_type_alias:
+        type_alias_class = type_alias.TypeAlias
+        scalar_class = scalar.DataTypeScalar
+        union_class = union.DataTypeUnion
+    else:
+        type_alias_class = type_alias.TypeAliasBackport
+        scalar_class = scalar.DataTypeScalarBackport
+        union_class = union.DataTypeUnionBackport
+
+    if data_model_type == DataModelType.PydanticBaseModel:
+        return DataModelSet(
+            data_model=pydantic.BaseModel,
+            root_model=type_alias_class if use_type_alias else pydantic.CustomRootType,
+            field_model=pydantic.DataModelField,
+            data_type_manager=pydantic.DataTypeManager,
+            dump_resolve_reference_action=pydantic.dump_resolve_reference_action,
+            scalar_model=scalar_class,
+            union_model=union_class,
+        )
+    if data_model_type == DataModelType.PydanticV2BaseModel:
+        return DataModelSet(
+            data_model=pydantic_v2.BaseModel,
+            root_model=type_alias_class if use_type_alias else pydantic_v2.RootModel,
+            field_model=pydantic_v2.DataModelField,
+            data_type_manager=pydantic_v2.DataTypeManager,
+            dump_resolve_reference_action=pydantic_v2.dump_resolve_reference_action,
+            scalar_model=scalar_class,
+            union_model=union_class,
+        )
+    if data_model_type == DataModelType.DataclassesDataclass:
+        return DataModelSet(
+            data_model=dataclass.DataClass,
+            root_model=type_alias_class,
+            field_model=dataclass.DataModelField,
+            data_type_manager=dataclass.DataTypeManager,
+            dump_resolve_reference_action=None,
+            scalar_model=scalar_class,
+            union_model=union_class,
+        )
+    if data_model_type == DataModelType.TypingTypedDict:
+        return DataModelSet(
+            data_model=typed_dict.TypedDict,
+            root_model=type_alias_class,
+            field_model=(
+                typed_dict.DataModelField
+                if target_python_version.has_typed_dict_non_required
+                else typed_dict.DataModelFieldBackport
+            ),
+            data_type_manager=DataTypeManager,
+            dump_resolve_reference_action=None,
+            scalar_model=scalar_class,
+            union_model=union_class,
+        )
+    if data_model_type == DataModelType.MsgspecStruct:
+        return DataModelSet(
+            data_model=msgspec.Struct,
+            root_model=type_alias_class,
+            field_model=msgspec.DataModelField,
+            data_type_manager=msgspec.DataTypeManager,
+            dump_resolve_reference_action=None,
+            known_third_party=["msgspec"],
+            scalar_model=scalar_class,
+            union_model=union_class,
+        )
+    msg = f"{data_model_type} is unsupported data model type"  # pragma: no cover
+    raise ValueError(msg)  # pragma: no cover
+
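+# A minimal usage sketch; the exact classes returned depend on the flags and
+# target version passed in:
+#
+#   from datamodel_code_generator import DataModelType
+#
+#   model_set = get_data_model_types(DataModelType.PydanticV2BaseModel, PythonVersion.PY_312)
+#   model_set.data_model  # pydantic_v2.BaseModel
+#   model_set.root_model  # pydantic_v2.RootModel (a type alias class if use_type_alias=True)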
+
+__all__ = ["ConstraintsBase", "DataModel", "DataModelFieldBase"]
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/base.py 0.45.0-1/src/datamodel_code_generator/model/base.py
--- 0.26.4-3/src/datamodel_code_generator/model/base.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/base.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,697 @@
+"""Base classes for data model generation.
+
+Provides ConstraintsBase for field constraints, DataModelFieldBase for field
+representation, and DataModel as the abstract base for all model types.
+"""
+
+from __future__ import annotations
+
+import re
+from abc import ABC, abstractmethod
+from collections import defaultdict
+from copy import deepcopy
+from dataclasses import dataclass
+from functools import cached_property, lru_cache
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, ClassVar, Optional, TypeVar, Union
+from warnings import warn
+
+from jinja2 import Environment, FileSystemLoader, Template
+from pydantic import Field
+from typing_extensions import Self
+
+from datamodel_code_generator.imports import (
+    IMPORT_ANNOTATED,
+    IMPORT_OPTIONAL,
+    IMPORT_UNION,
+    Import,
+)
+from datamodel_code_generator.reference import Reference, _BaseModel
+from datamodel_code_generator.types import (
+    ANY,
+    NONE,
+    OPTIONAL_PREFIX,
+    UNION_PREFIX,
+    DataType,
+    Nullable,
+    chain_as_tuple,
+    get_optional_type,
+)
+from datamodel_code_generator.util import PYDANTIC_V2, ConfigDict
+
+if TYPE_CHECKING:
+    from collections.abc import Iterator
+
+    from datamodel_code_generator import DataclassArguments
+
+TEMPLATE_DIR: Path = Path(__file__).parents[0] / "template"
+
+ALL_MODEL: str = "#all#"
+
+
+def repr_set_sorted(value: set[Any]) -> str:
+    """Return a repr of a set with elements sorted for consistent output.
+
+    Uses (type_name, repr(x)) as sort key to safely handle any type including
+    Enum, custom classes, or types without __lt__ defined.
+    """
+    if not value:
+        return "set()"
+    # Sort by type name first, then by repr for consistent output
+    sorted_elements = sorted(value, key=lambda x: (type(x).__name__, repr(x)))
+    return "{" + ", ".join(repr(e) for e in sorted_elements) + "}"
+
+
+ConstraintsBaseT = TypeVar("ConstraintsBaseT", bound="ConstraintsBase")
+DataModelFieldBaseT = TypeVar("DataModelFieldBaseT", bound="DataModelFieldBase")
+
+
+class ConstraintsBase(_BaseModel):
+    """Base class for field constraints (min/max, patterns, etc.)."""
+
+    unique_items: Optional[bool] = Field(None, alias="uniqueItems")  # noqa: UP045
+    _exclude_fields: ClassVar[set[str]] = {"has_constraints"}
+    if PYDANTIC_V2:
+        model_config = ConfigDict(  # pyright: ignore[reportAssignmentType]
+            arbitrary_types_allowed=True, ignored_types=(cached_property,)
+        )
+    else:
+
+        class Config:
+            """Pydantic v1 configuration for ConstraintsBase."""
+
+            arbitrary_types_allowed = True
+            keep_untouched = (cached_property,)
+
+    @cached_property
+    def has_constraints(self) -> bool:
+        """Check if any constraint values are set."""
+        return any(v is not None for v in self.dict().values())
+
+    @staticmethod
+    def merge_constraints(a: ConstraintsBaseT | None, b: ConstraintsBaseT | None) -> ConstraintsBaseT | None:
+        """Merge two constraint objects, with b taking precedence over a."""
+        constraints_class = None
+        if isinstance(a, ConstraintsBase):  # pragma: no cover
+            root_type_field_constraints = {k: v for k, v in a.dict(by_alias=True).items() if v is not None}
+            constraints_class = a.__class__
+        else:
+            root_type_field_constraints = {}  # pragma: no cover
+
+        if isinstance(b, ConstraintsBase):  # pragma: no cover
+            model_field_constraints = {k: v for k, v in b.dict(by_alias=True).items() if v is not None}
+            constraints_class = constraints_class or b.__class__
+        else:
+            model_field_constraints = {}
+
+        if constraints_class is None or not issubclass(constraints_class, ConstraintsBase):  # pragma: no cover
+            return None
+
+        return constraints_class.parse_obj({
+            **root_type_field_constraints,
+            **model_field_constraints,
+        })
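+
+    # For illustration (field names hypothetical): merging a={"minLength": 1} with
+    # b={"minLength": 3} yields minLength=3, since b's non-None values take precedence.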
+
+
+@dataclass(repr=False)
+class WrappedDefault:
+    """Represents a default value wrapped with its type constructor."""
+
+    value: Any
+    type_name: str
+
+    def __repr__(self) -> str:
+        """Return type constructor representation, e.g., 'CountType(10)'."""
+        return f"{self.type_name}({self.value!r})"
+
+
+class DataModelFieldBase(_BaseModel):
+    """Base class for model field representation and rendering."""
+
+    if PYDANTIC_V2:
+        model_config = ConfigDict(  # pyright: ignore[reportAssignmentType]
+            arbitrary_types_allowed=True,
+            defer_build=True,
+        )
+    else:
+
+        class Config:
+            """Pydantic v1 configuration for DataModelFieldBase."""
+
+            arbitrary_types_allowed = True
+
+    name: Optional[str] = None  # noqa: UP045
+    default: Optional[Any] = None  # noqa: UP045
+    required: bool = False
+    alias: Optional[str] = None  # noqa: UP045
+    data_type: DataType
+    constraints: Any = None
+    strip_default_none: bool = False
+    nullable: Optional[bool] = None  # noqa: UP045
+    parent: Optional[DataModel] = None  # noqa: UP045
+    extras: dict[str, Any] = Field(default_factory=dict)
+    use_annotated: bool = False
+    use_serialize_as_any: bool = False
+    has_default: bool = False
+    use_field_description: bool = False
+    use_inline_field_description: bool = False
+    const: bool = False
+    original_name: Optional[str] = None  # noqa: UP045
+    use_default_kwarg: bool = False
+    use_one_literal_as_default: bool = False
+    _exclude_fields: ClassVar[set[str]] = {"parent"}
+    _pass_fields: ClassVar[set[str]] = {"parent", "data_type"}
+    can_have_extra_keys: ClassVar[bool] = True
+    type_has_null: Optional[bool] = None  # noqa: UP045
+    read_only: bool = False
+    write_only: bool = False
+    use_frozen_field: bool = False
+
+    if not TYPE_CHECKING:
+        if not PYDANTIC_V2:
+
+            @classmethod
+            def model_rebuild(
+                cls,
+                *,
+                _types_namespace: dict[str, type] | None = None,
+            ) -> None:
+                """Update forward references for Pydantic v1."""
+                localns = _types_namespace or {}
+                cls.update_forward_refs(**localns)
+
+        def __init__(self, **data: Any) -> None:
+            """Initialize the field and set up parent relationships."""
+            super().__init__(**data)
+            if self.data_type.reference or self.data_type.data_types:
+                self.data_type.parent = self
+            self.process_const()
+
+    def process_const(self) -> None:
+        """Process const values by setting them as defaults."""
+        if "const" not in self.extras:
+            return
+        self.default = self.extras["const"]
+        self.const = True
+        self.required = False
+        self.nullable = False
+
+    def _process_const_as_literal(self) -> None:
+        """Process const values by converting to literal type. Used by subclasses."""
+        if "const" not in self.extras:
+            return
+        const = self.extras["const"]
+        self.const = True
+        self.nullable = False
+        self.replace_data_type(self.data_type.__class__(literals=[const]), clear_old_parent=False)
+        if not self.default:
+            self.default = const
+
+    def self_reference(self) -> bool:
+        """Check if field references its parent model."""
+        if self.parent is None or not self.parent.reference:  # pragma: no cover
+            return False
+        return self.parent.reference.path in {d.reference.path for d in self.data_type.all_data_types if d.reference}
+
+    @property
+    def type_hint(self) -> str:  # noqa: PLR0911
+        """Get the type hint string for this field, including nullability."""
+        type_hint = self.data_type.type_hint
+
+        if not type_hint:
+            return NONE
+        if self.has_default_factory or (self.data_type.is_optional and self.data_type.type != ANY):
+            return type_hint
+        if self.nullable is not None:
+            if self.nullable:
+                return get_optional_type(type_hint, self.data_type.use_union_operator)
+            return type_hint
+        if self.required:
+            if self.type_has_null:
+                return get_optional_type(type_hint, self.data_type.use_union_operator)
+            return type_hint
+        if self.fall_back_to_nullable:
+            return get_optional_type(type_hint, self.data_type.use_union_operator)
+        return type_hint
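+
+    # Nullability sketch (editor's illustration): an optional field whose type
+    # renders as 'str' falls back to the wrapped form via get_optional_type,
+    # which presumably yields 'Optional[str]' (or 'str | None' when the union
+    # operator is enabled), following the OPTIONAL_PREFIX conventions above.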
+
+    @property
+    def imports(self) -> tuple[Import, ...]:
+        """Get all imports required for this field's type hint."""
+        type_hint = self.type_hint
+        has_union = not self.data_type.use_union_operator and UNION_PREFIX in type_hint
+        has_optional = OPTIONAL_PREFIX in type_hint
+        imports: list[tuple[Import] | Iterator[Import]] = [
+            iter(
+                i
+                for i in self.data_type.all_imports
+                if not ((not has_union and i == IMPORT_UNION) or (not has_optional and i == IMPORT_OPTIONAL))
+            )
+        ]
+
+        if has_optional:
+            imports.append((IMPORT_OPTIONAL,))
+        if self.use_annotated and self.needs_annotated_import:
+            imports.append((IMPORT_ANNOTATED,))
+        return chain_as_tuple(*imports)
+
+    @property
+    def docstring(self) -> str | None:
+        """Get the docstring for this field from its description."""
+        if self.use_field_description:
+            description = self.extras.get("description", None)
+            if description is not None:
+                return f"{description}"
+        elif self.use_inline_field_description:
+            # For inline mode, only use multi-line docstring format for multi-line descriptions
+            description = self.extras.get("description", None)
+            if description is not None and "\n" in description:
+                return f"{description}"
+        return None
+
+    @property
+    def inline_field_docstring(self) -> str | None:
+        """Get the inline docstring for this field if single-line."""
+        if self.use_inline_field_description:
+            description = self.extras.get("description", None)
+            if description is not None and "\n" not in description:
+                return f'"""{description}"""'
+        return None
+
+    @property
+    def unresolved_types(self) -> frozenset[str]:
+        """Get the set of unresolved type references."""
+        return self.data_type.unresolved_types
+
+    @property
+    def field(self) -> str | None:
+        """For backwards compatibility."""
+        return None
+
+    @property
+    def method(self) -> str | None:
+        """Get the method string for this field, if any."""
+        return None
+
+    @property
+    def represented_default(self) -> str:
+        """Get the repr() string of the default value."""
+        if isinstance(self.default, set):
+            return repr_set_sorted(self.default)
+        return repr(self.default)
+
+    @property
+    def annotated(self) -> str | None:
+        """Get the Annotated type hint content, if any."""
+        return None
+
+    @property
+    def needs_annotated_import(self) -> bool:
+        """Check if this field requires the Annotated import."""
+        return bool(self.annotated)
+
+    @property
+    def needs_meta_import(self) -> bool:  # pragma: no cover
+        """Check if this field requires the Meta import (msgspec only)."""
+        return False
+
+    @property
+    def has_default_factory(self) -> bool:
+        """Check if this field has a default_factory."""
+        return "default_factory" in self.extras
+
+    @property
+    def fall_back_to_nullable(self) -> bool:
+        """Check if optional fields should be nullable by default."""
+        return True
+
+    def copy_deep(self) -> Self:
+        """Create a deep copy of this field to avoid mutating the original."""
+        copied = self.copy()
+        copied.parent = None
+        copied.data_type = self.data_type.copy()
+        if self.data_type.data_types:
+            copied.data_type.data_types = [dt.copy() for dt in self.data_type.data_types]
+        return copied
+
+    def replace_data_type(self, new_data_type: DataType, *, clear_old_parent: bool = True) -> None:
+        """Replace data_type and update parent relationships.
+
+        Args:
+            new_data_type: The new DataType to set.
+            clear_old_parent: If True, clear the old data_type's parent reference.
+                Set to False when the old data_type may be referenced elsewhere.
+        """
+        if self.data_type.parent is self and clear_old_parent:
+            self.data_type.swap_with(new_data_type)
+        else:
+            self.data_type = new_data_type
+            new_data_type.parent = self
+
+
+@lru_cache
+def get_template(template_file_path: Path) -> Template:
+    """Load and cache a Jinja2 template from the template directory."""
+    loader = FileSystemLoader(str(TEMPLATE_DIR / template_file_path.parent))
+    environment: Environment = Environment(loader=loader)  # noqa: S701
+    return environment.get_template(template_file_path.name)
+
+
+def sanitize_module_name(name: str, *, treat_dot_as_module: bool) -> str:
+    """Sanitize a module name by replacing invalid characters."""
+    pattern = r"[^0-9a-zA-Z_.]" if treat_dot_as_module else r"[^0-9a-zA-Z_]"
+    sanitized = re.sub(pattern, "_", name)
+    if sanitized and sanitized[0].isdigit():
+        sanitized = f"_{sanitized}"
+    return sanitized
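+
+# Sketch (editor's illustration):
+#   >>> sanitize_module_name("api-v1.user", treat_dot_as_module=True)
+#   'api_v1.user'
+#   >>> sanitize_module_name("api-v1.user", treat_dot_as_module=False)
+#   'api_v1_user'
+#   >>> sanitize_module_name("1schema", treat_dot_as_module=True)
+#   '_1schema'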
+
+
+def get_module_path(name: str, file_path: Path | None, *, treat_dot_as_module: bool) -> list[str]:
+    """Get the module path components from a name and file path."""
+    if file_path:
+        sanitized_stem = sanitize_module_name(file_path.stem, treat_dot_as_module=treat_dot_as_module)
+        return [
+            *file_path.parts[:-1],
+            sanitized_stem,
+            *name.split(".")[:-1],
+        ]
+    return name.split(".")[:-1]
+
+
+def get_module_name(name: str, file_path: Path | None, *, treat_dot_as_module: bool) -> str:
+    """Get the full module name from a name and file path."""
+    return ".".join(get_module_path(name, file_path, treat_dot_as_module=treat_dot_as_module))
+
+
+class TemplateBase(ABC):
+    """Abstract base class for template-based code generation."""
+
+    @cached_property
+    @abstractmethod
+    def template_file_path(self) -> Path:
+        """Get the path to the template file."""
+        raise NotImplementedError
+
+    @cached_property
+    def template(self) -> Template:
+        """Get the cached Jinja2 template instance."""
+        return get_template(self.template_file_path)
+
+    @abstractmethod
+    def render(self) -> str:
+        """Render the template to a string."""
+        raise NotImplementedError
+
+    def _render(self, *args: Any, **kwargs: Any) -> str:
+        """Render the template with the given arguments."""
+        return self.template.render(*args, **kwargs)
+
+    def __str__(self) -> str:
+        """Return the rendered template as a string."""
+        return self.render()
+
+
+class BaseClassDataType(DataType):
+    """DataType subclass for base class references."""
+
+
+UNDEFINED: Any = object()
+
+
+class DataModel(TemplateBase, Nullable, ABC):  # noqa: PLR0904
+    """Abstract base class for all data model types.
+
+    Handles template rendering, import collection, and model relationships.
+    """
+
+    TEMPLATE_FILE_PATH: ClassVar[str] = ""
+    BASE_CLASS: ClassVar[str] = ""
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = ()
+    IS_ALIAS: bool = False
+
+    def __init__(  # noqa: PLR0913
+        self,
+        *,
+        reference: Reference,
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
+        methods: list[str] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
+        default: Any = UNDEFINED,
+        nullable: bool = False,
+        keyword_only: bool = False,
+        frozen: bool = False,
+        treat_dot_as_module: bool = False,
+        dataclass_arguments: DataclassArguments | None = None,
+    ) -> None:
+        """Initialize a data model with fields, base classes, and configuration."""
+        self.keyword_only = keyword_only
+        self.frozen = frozen
+        self.dataclass_arguments: DataclassArguments = dataclass_arguments if dataclass_arguments is not None else {}
+        if not self.TEMPLATE_FILE_PATH:
+            msg = "TEMPLATE_FILE_PATH is undefined"
+            raise Exception(msg)  # noqa: TRY002
+
+        self._custom_template_dir: Path | None = custom_template_dir
+        self.decorators: list[str] = decorators or []
+        self._additional_imports: list[Import] = []
+        self.custom_base_class = custom_base_class
+        if base_classes:
+            self.base_classes: list[BaseClassDataType] = [BaseClassDataType(reference=b) for b in base_classes]
+        else:
+            self.set_base_class()
+
+        self.file_path: Path | None = path
+        self.reference: Reference = reference
+
+        self.reference.source = self
+
+        if extra_template_data is not None:
+            # The supplied defaultdict will either create a new entry,
+            # or already contain a predefined entry for this type
+            self.extra_template_data = extra_template_data[self.reference.path]
+
+            # We use the full object reference path as the dictionary key, but
+            # we still support `name` as a key because it was used for the
+            # `--extra-template-data` input file and we don't want to break
+            # existing behavior.
+            self.extra_template_data.update(extra_template_data[self.name])
+        else:
+            self.extra_template_data = defaultdict(dict)
+
+        self.fields = self._validate_fields(fields) if fields else []
+
+        for base_class in self.base_classes:
+            if base_class.reference:
+                base_class.reference.children.append(self)
+
+        if extra_template_data is not None:
+            all_model_extra_template_data = extra_template_data.get(ALL_MODEL)
+            if all_model_extra_template_data:
+                # The deepcopy is needed here to ensure that different models don't
+                # end up inadvertently sharing state (such as "base_class_kwargs")
+                self.extra_template_data.update(deepcopy(all_model_extra_template_data))
+
+        self.methods: list[str] = methods or []
+
+        self.description = description
+        for field in self.fields:
+            field.parent = self
+
+        self._additional_imports.extend(self.DEFAULT_IMPORTS)
+        self.default: Any = default
+        self._nullable: bool = nullable
+        self._treat_dot_as_module: bool = treat_dot_as_module
+
+    def _validate_fields(self, fields: list[DataModelFieldBase]) -> list[DataModelFieldBase]:
+        names: set[str] = set()
+        unique_fields: list[DataModelFieldBase] = []
+        for field in fields:
+            if field.name:
+                if field.name in names:
+                    warn(f"Field name `{field.name}` is duplicated on {self.name}", stacklevel=2)
+                    continue
+                names.add(field.name)
+            unique_fields.append(field)
+        return unique_fields
+
+    def iter_all_fields(self, visited: set[str] | None = None) -> Iterator[DataModelFieldBase]:
+        """Yield all fields including those from base classes (parent fields first)."""
+        if visited is None:
+            visited = set()
+        if self.reference.path in visited:  # pragma: no cover
+            return
+        visited.add(self.reference.path)
+        for base_class in self.base_classes:
+            if base_class.reference and isinstance(base_class.reference.source, DataModel):
+                yield from base_class.reference.source.iter_all_fields(visited)
+        yield from self.fields
+
+    def get_dedup_key(self, class_name: str | None = None, *, use_default: bool = True) -> tuple[Any, ...]:
+        """Generate hashable key for model deduplication."""
+        from datamodel_code_generator.parser.base import to_hashable  # noqa: PLC0415
+
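+        # Render under the placeholder name "M" when no explicit name is given,
+        # so structurally identical models compare equal regardless of class name.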
+        render_class_name = class_name if class_name is not None or not use_default else "M"
+        return tuple(to_hashable(v) for v in (self.render(class_name=render_class_name), self.imports))
+
+    def create_reuse_model(self, base_ref: Reference) -> Self:
+        """Create inherited model with empty fields pointing to base reference."""
+        return self.__class__(
+            fields=[],
+            base_classes=[base_ref],
+            description=self.description,
+            reference=Reference(
+                name=self.name,
+                path=self.reference.path + "/reuse",
+            ),
+            custom_template_dir=self._custom_template_dir,
+        )
+
+    def replace_children_in_models(self, models: list[DataModel], new_ref: Reference) -> None:
+        """Replace reference children if their parent model is in models list."""
+        from datamodel_code_generator.parser.base import get_most_of_parent  # noqa: PLC0415
+
+        for child in self.reference.children[:]:
+            if isinstance(child, DataType) and get_most_of_parent(child) in models:
+                child.replace_reference(new_ref)
+
+    def set_base_class(self) -> None:
+        """Set up the base class for this model."""
+        base_class = self.custom_base_class or self.BASE_CLASS
+        if not base_class:
+            self.base_classes = []
+            return
+        base_class_import = Import.from_full_path(base_class)
+        self._additional_imports.append(base_class_import)
+        self.base_classes = [BaseClassDataType.from_import(base_class_import)]
+
+    @cached_property
+    def template_file_path(self) -> Path:
+        """Get the path to the template file, checking custom directory first."""
+        template_file_path = Path(self.TEMPLATE_FILE_PATH)
+        if self._custom_template_dir is not None:
+            custom_template_file_path = self._custom_template_dir / template_file_path
+            if custom_template_file_path.exists():
+                return custom_template_file_path
+        return template_file_path
+
+    @property
+    def imports(self) -> tuple[Import, ...]:
+        """Get all imports required by this model and its fields."""
+        return chain_as_tuple(
+            (i for f in self.fields for i in f.imports),
+            self._additional_imports,
+        )
+
+    @property
+    def reference_classes(self) -> frozenset[str]:
+        """Get all referenced class paths used by this model."""
+        return frozenset(
+            {r.reference.path for r in self.base_classes if r.reference}
+            | {t for f in self.fields for t in f.unresolved_types}
+        )
+
+    @property
+    def name(self) -> str:
+        """Get the full name of this model."""
+        return self.reference.name
+
+    @property
+    def duplicate_name(self) -> str:
+        """Get the duplicate name for this model if it exists."""
+        return self.reference.duplicate_name or ""
+
+    @property
+    def base_class(self) -> str:
+        """Get the comma-separated string of base class names."""
+        return ", ".join(b.type_hint for b in self.base_classes)
+
+    @staticmethod
+    def _get_class_name(name: str) -> str:
+        if "." in name:
+            return name.rsplit(".", 1)[-1]
+        return name
+
+    @property
+    def class_name(self) -> str:
+        """Get the class name without module path."""
+        return self._get_class_name(self.name)
+
+    @class_name.setter
+    def class_name(self, class_name: str) -> None:
+        if "." in self.reference.name:
+            self.reference.name = f"{self.reference.name.rsplit('.', 1)[0]}.{class_name}"
+        else:
+            self.reference.name = class_name
+
+    @property
+    def duplicate_class_name(self) -> str:
+        """Get the duplicate class name without module path."""
+        return self._get_class_name(self.duplicate_name)
+
+    @property
+    def module_path(self) -> list[str]:
+        """Get the module path components for this model."""
+        return get_module_path(self.name, self.file_path, treat_dot_as_module=self._treat_dot_as_module)
+
+    @property
+    def module_name(self) -> str:
+        """Get the full module name for this model."""
+        return get_module_name(self.name, self.file_path, treat_dot_as_module=self._treat_dot_as_module)
+
+    @property
+    def all_data_types(self) -> Iterator[DataType]:
+        """Iterate over all data types used in this model."""
+        for field in self.fields:
+            yield from field.data_type.all_data_types
+        yield from self.base_classes
+
+    @property
+    def is_alias(self) -> bool:
+        """Whether is a type alias (i.e. not an instance of BaseModel/RootModel)."""
+        return self.IS_ALIAS
+
+    @property
+    def nullable(self) -> bool:
+        """Check if this model is nullable."""
+        return self._nullable
+
+    @cached_property
+    def path(self) -> str:
+        """Get the full reference path for this model."""
+        return self.reference.path
+
+    def set_reference_path(self, new_path: str) -> None:
+        """Set reference path and clear cached path property."""
+        self.reference.path = new_path
+        if "path" in self.__dict__:
+            del self.__dict__["path"]
+
+    def render(self, *, class_name: str | None = None) -> str:
+        """Render the model to a string using the template."""
+        return self._render(
+            class_name=class_name or self.class_name,
+            fields=self.fields,
+            decorators=self.decorators,
+            base_class=self.base_class,
+            methods=self.methods,
+            description=self.description,
+            dataclass_arguments=self.dataclass_arguments,
+            **self.extra_template_data,
+        )
+
+
+# The rebuild namespace is identical under Pydantic v1 and v2; the v1 classes
+# expose a `model_rebuild` shim accepting `_types_namespace` (see above), so a
+# single code path suffices.
+_rebuild_namespace = {"Union": Union, "DataModelFieldBase": DataModelFieldBase, "DataType": DataType}
+DataType.model_rebuild(_types_namespace=_rebuild_namespace)
+BaseClassDataType.model_rebuild(_types_namespace=_rebuild_namespace)
+DataModelFieldBase.model_rebuild(_types_namespace={"DataModel": DataModel})
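+
+# Usage sketch (editor's illustration, not part of the module): a concrete
+# model only needs a TEMPLATE_FILE_PATH; rendering then flows through the
+# cached Jinja2 machinery above. Assuming the stock template directory:
+#
+#     class Simple(DataModel):
+#         TEMPLATE_FILE_PATH: ClassVar[str] = "dataclass.jinja2"
+#
+#     model = Simple(reference=Reference(name="User", path="schemas/User"), fields=[])
+#     source = model.render()  # renders with class_name="User" and no fields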
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/dataclass.py 0.45.0-1/src/datamodel_code_generator/model/dataclass.py
--- 0.26.4-3/src/datamodel_code_generator/model/dataclass.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/dataclass.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,211 @@
+"""Python dataclass model generator.
+
+Generates Python dataclasses using the @dataclass decorator.
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, ClassVar, Optional
+
+from datamodel_code_generator import DataclassArguments, DatetimeClassType, PythonVersion, PythonVersionMin
+from datamodel_code_generator.imports import (
+    IMPORT_DATE,
+    IMPORT_DATETIME,
+    IMPORT_TIME,
+    IMPORT_TIMEDELTA,
+    Import,
+)
+from datamodel_code_generator.model import DataModel, DataModelFieldBase
+from datamodel_code_generator.model.base import UNDEFINED
+from datamodel_code_generator.model.imports import IMPORT_DATACLASS, IMPORT_FIELD
+from datamodel_code_generator.model.pydantic.base_model import Constraints  # noqa: TC001 # needed for pydantic
+from datamodel_code_generator.model.types import DataTypeManager as _DataTypeManager
+from datamodel_code_generator.model.types import type_map_factory
+from datamodel_code_generator.types import DataType, StrictTypes, Types, chain_as_tuple
+
+if TYPE_CHECKING:
+    from collections import defaultdict
+    from collections.abc import Sequence
+    from pathlib import Path
+
+    from datamodel_code_generator.reference import Reference
+
+
+def has_field_assignment(field: DataModelFieldBase) -> bool:
+    """Check if a dataclass field has a default value or field() assignment."""
+    return bool(field.field) or not (
+        field.required or (field.represented_default == "None" and field.strip_default_none)
+    )
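+
+# Ordering note (editor's): since False sorts before True, sorting fields by
+# this key (see DataClass below) moves required fields ahead of defaulted ones,
+# keeping the generated dataclass valid: non-default arguments may not follow
+# default ones.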
+
+
+class DataClass(DataModel):
+    """DataModel implementation for Python dataclasses."""
+
+    TEMPLATE_FILE_PATH: ClassVar[str] = "dataclass.jinja2"
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_DATACLASS,)
+
+    def __init__(  # noqa: PLR0913
+        self,
+        *,
+        reference: Reference,
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
+        methods: list[str] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
+        default: Any = UNDEFINED,
+        nullable: bool = False,
+        keyword_only: bool = False,
+        frozen: bool = False,
+        treat_dot_as_module: bool = False,
+        dataclass_arguments: DataclassArguments | None = None,
+    ) -> None:
+        """Initialize dataclass with fields sorted by field assignment requirement."""
+        super().__init__(
+            reference=reference,
+            fields=sorted(fields, key=has_field_assignment),
+            decorators=decorators,
+            base_classes=base_classes,
+            custom_base_class=custom_base_class,
+            custom_template_dir=custom_template_dir,
+            extra_template_data=extra_template_data,
+            methods=methods,
+            path=path,
+            description=description,
+            default=default,
+            nullable=nullable,
+            keyword_only=keyword_only,
+            frozen=frozen,
+            treat_dot_as_module=treat_dot_as_module,
+        )
+        if dataclass_arguments is not None:
+            self.dataclass_arguments = dataclass_arguments
+        else:
+            self.dataclass_arguments = {}
+            if frozen:
+                self.dataclass_arguments["frozen"] = True
+            if keyword_only:
+                self.dataclass_arguments["kw_only"] = True
+
+
+class DataModelField(DataModelFieldBase):
+    """Field implementation for dataclass models."""
+
+    _FIELD_KEYS: ClassVar[set[str]] = {
+        "default_factory",
+        "init",
+        "repr",
+        "hash",
+        "compare",
+        "metadata",
+        "kw_only",
+    }
+    constraints: Optional[Constraints] = None  # noqa: UP045
+
+    def process_const(self) -> None:
+        """Process const field constraint using literal type."""
+        self._process_const_as_literal()
+
+    @property
+    def imports(self) -> tuple[Import, ...]:
+        """Get imports including field() if needed."""
+        field = self.field
+        if field and field.startswith("field("):
+            return chain_as_tuple(super().imports, (IMPORT_FIELD,))
+        return super().imports
+
+    @property
+    def field(self) -> str | None:
+        """For backwards compatibility."""
+        result = str(self)
+        if not result:
+            return None
+        return result
+
+    def __str__(self) -> str:
+        """Generate field() call or default value representation."""
+        data: dict[str, Any] = {k: v for k, v in self.extras.items() if k in self._FIELD_KEYS}
+
+        if self.default is not UNDEFINED and self.default is not None:
+            data["default"] = self.default
+
+        if self.required:
+            data = {
+                k: v
+                for k, v in data.items()
+                if k
+                not in {
+                    "default",
+                    "default_factory",
+                }
+            }
+
+        if not data:
+            return ""
+
+        if len(data) == 1 and "default" in data:
+            default = data["default"]
+
+            if isinstance(default, (list, dict, set)):
+                if default:
+                    from datamodel_code_generator.model.base import repr_set_sorted  # noqa: PLC0415
+
+                    default_repr = repr_set_sorted(default) if isinstance(default, set) else repr(default)
+                    return f"field(default_factory=lambda: {default_repr})"
+                return f"field(default_factory={type(default).__name__})"
+            return repr(default)
+        kwargs = [f"{k}={v if k == 'default_factory' else repr(v)}" for k, v in data.items()]
+        return f"field({', '.join(kwargs)})"
+
+
+class DataTypeManager(_DataTypeManager):
+    """Type manager for dataclass models."""
+
+    def __init__(  # noqa: PLR0913, PLR0917
+        self,
+        python_version: PythonVersion = PythonVersionMin,
+        use_standard_collections: bool = False,  # noqa: FBT001, FBT002
+        use_generic_container_types: bool = False,  # noqa: FBT001, FBT002
+        strict_types: Sequence[StrictTypes] | None = None,
+        use_non_positive_negative_number_constrained_types: bool = False,  # noqa: FBT001, FBT002
+        use_decimal_for_multiple_of: bool = False,  # noqa: FBT001, FBT002
+        use_union_operator: bool = False,  # noqa: FBT001, FBT002
+        use_pendulum: bool = False,  # noqa: FBT001, FBT002
+        target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
+        treat_dot_as_module: bool = False,  # noqa: FBT001, FBT002
+        use_serialize_as_any: bool = False,  # noqa: FBT001, FBT002
+    ) -> None:
+        """Initialize type manager with datetime type mapping."""
+        super().__init__(
+            python_version,
+            use_standard_collections,
+            use_generic_container_types,
+            strict_types,
+            use_non_positive_negative_number_constrained_types,
+            use_decimal_for_multiple_of,
+            use_union_operator,
+            use_pendulum,
+            target_datetime_class,
+            treat_dot_as_module,
+            use_serialize_as_any,
+        )
+
+        datetime_map = (
+            {
+                Types.time: self.data_type.from_import(IMPORT_TIME),
+                Types.date: self.data_type.from_import(IMPORT_DATE),
+                Types.date_time: self.data_type.from_import(IMPORT_DATETIME),
+                Types.timedelta: self.data_type.from_import(IMPORT_TIMEDELTA),
+            }
+            if target_datetime_class is DatetimeClassType.Datetime
+            else {}
+        )
+
+        self.type_map: dict[Types, DataType] = {
+            **type_map_factory(self.data_type),
+            **datetime_map,
+        }
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/enum.py 0.45.0-1/src/datamodel_code_generator/model/enum.py
--- 0.26.4-3/src/datamodel_code_generator/model/enum.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/enum.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,159 @@
+"""Enumeration model generator.
+
+Provides Enum, StrEnum, and specialized enum classes for code generation.
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, ClassVar, Optional
+
+from datamodel_code_generator.imports import IMPORT_ANY, IMPORT_ENUM, IMPORT_INT_ENUM, IMPORT_STR_ENUM, Import
+from datamodel_code_generator.model import DataModel, DataModelFieldBase
+from datamodel_code_generator.model.base import UNDEFINED, BaseClassDataType
+from datamodel_code_generator.types import DataType, Types
+
+if TYPE_CHECKING:
+    from collections import defaultdict
+    from pathlib import Path
+
+    from datamodel_code_generator.reference import Reference
+
+
+_INT: str = "int"
+_FLOAT: str = "float"
+_BYTES: str = "bytes"
+_STR: str = "str"
+
+SUBCLASS_BASE_CLASSES: dict[Types, str] = {
+    Types.int32: _INT,
+    Types.int64: _INT,
+    Types.integer: _INT,
+    Types.float: _FLOAT,
+    Types.double: _FLOAT,
+    Types.number: _FLOAT,
+    Types.byte: _BYTES,
+    Types.string: _STR,
+}
+
+
+class Enum(DataModel):
+    """DataModel implementation for Python enumerations."""
+
+    TEMPLATE_FILE_PATH: ClassVar[str] = "Enum.jinja2"
+    BASE_CLASS: ClassVar[str] = "enum.Enum"
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_ENUM,)
+
+    def __init__(  # noqa: PLR0913
+        self,
+        *,
+        reference: Reference,
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
+        methods: list[str] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
+        type_: Types | None = None,
+        default: Any = UNDEFINED,
+        nullable: bool = False,
+        keyword_only: bool = False,
+        treat_dot_as_module: bool = False,
+    ) -> None:
+        """Initialize Enum with optional specialized base class based on type."""
+        super().__init__(
+            reference=reference,
+            fields=fields,
+            decorators=decorators,
+            base_classes=base_classes,
+            custom_base_class=custom_base_class,
+            custom_template_dir=custom_template_dir,
+            extra_template_data=extra_template_data,
+            methods=methods,
+            path=path,
+            description=description,
+            default=default,
+            nullable=nullable,
+            keyword_only=keyword_only,
+            treat_dot_as_module=treat_dot_as_module,
+        )
+        if not base_classes and type_ and (base_class := SUBCLASS_BASE_CLASSES.get(type_)):
+            self.base_classes: list[BaseClassDataType] = [
+                BaseClassDataType(type=base_class),
+                *self.base_classes,
+            ]
+
+    @classmethod
+    def get_data_type(cls, types: Types, **kwargs: Any) -> DataType:
+        """Get data type for enum (not implemented)."""
+        raise NotImplementedError
+
+    def get_member(self, field: DataModelFieldBase) -> Member:
+        """Create a Member instance for the given field."""
+        return Member(self, field)
+
+    def find_member(self, value: Any) -> Member | None:
+        """Find enum member matching the given value."""
+        repr_value = repr(value)
+        # Remove surrounding quotes from the string representation
+        str_value = str(value).strip("'\"")
+
+        for field in self.fields:
+            # Remove surrounding quotes from field default value
+            field_default = str(field.default or "").strip("'\"")
+
+            # Compare values after removing quotes
+            if field_default == str_value:
+                return self.get_member(field)
+
+            # Keep original comparison for backwards compatibility
+            if field.default == repr_value:  # pragma: no cover
+                return self.get_member(field)
+
+        return None
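+
+    # Lookup sketch (editor's illustration): field defaults are stored as
+    # source reprs such as "'red'", so lookups match with or without quotes:
+    #   color.find_member("red")    -> Member (repr 'Color.red')
+    #   color.find_member("'red'")  -> the same member
+    #   color.find_member("blue")   -> None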
+
+    @property
+    def imports(self) -> tuple[Import, ...]:
+        """Get imports excluding Any."""
+        return tuple(i for i in super().imports if i != IMPORT_ANY)
+
+
+class StrEnum(Enum):
+    """String enumeration type."""
+
+    BASE_CLASS: ClassVar[str] = "enum.StrEnum"
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_STR_ENUM,)
+
+
+class IntEnum(Enum):
+    """Integer enumeration type."""
+
+    BASE_CLASS: ClassVar[str] = "enum.IntEnum"
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_INT_ENUM,)
+
+
+SPECIALIZED_ENUM_TYPE_MATCH: dict[Types, type[Enum]] = {
+    Types.int32: IntEnum,
+    Types.int64: IntEnum,
+    Types.integer: IntEnum,
+    Types.string: StrEnum,
+}
+"""
+Map specialized enum types to their corresponding Enum subclasses.
+"""
+
+
+class Member:
+    """Represents an enum member with its parent enum and field."""
+
+    def __init__(self, enum: Enum, field: DataModelFieldBase) -> None:
+        """Initialize enum member."""
+        self.enum: Enum = enum
+        self.field: DataModelFieldBase = field
+        self.alias: Optional[str] = None  # noqa: UP045
+
+    def __repr__(self) -> str:
+        """Return string representation of enum member."""
+        return f"{self.alias or self.enum.class_name}.{self.field.name}"
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/imports.py 0.45.0-1/src/datamodel_code_generator/model/imports.py
--- 0.26.4-3/src/datamodel_code_generator/model/imports.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/imports.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,22 @@
+"""Import definitions for model modules.
+
+Provides pre-defined Import objects for dataclasses, TypedDict, msgspec, etc.
+"""
+
+from __future__ import annotations
+
+from datamodel_code_generator.imports import Import
+
+IMPORT_DATACLASS = Import.from_full_path("dataclasses.dataclass")
+IMPORT_FIELD = Import.from_full_path("dataclasses.field")
+IMPORT_CLASSVAR = Import.from_full_path("typing.ClassVar")
+IMPORT_TYPED_DICT = Import.from_full_path("typing.TypedDict")
+IMPORT_TYPED_DICT_BACKPORT = Import.from_full_path("typing_extensions.TypedDict")
+IMPORT_NOT_REQUIRED = Import.from_full_path("typing.NotRequired")
+IMPORT_NOT_REQUIRED_BACKPORT = Import.from_full_path("typing_extensions.NotRequired")
+IMPORT_MSGSPEC_STRUCT = Import.from_full_path("msgspec.Struct")
+IMPORT_MSGSPEC_FIELD = Import.from_full_path("msgspec.field")
+IMPORT_MSGSPEC_META = Import.from_full_path("msgspec.Meta")
+IMPORT_MSGSPEC_CONVERT = Import.from_full_path("msgspec.convert")
+IMPORT_MSGSPEC_UNSET = Import.from_full_path("msgspec.UNSET")
+IMPORT_MSGSPEC_UNSETTYPE = Import.from_full_path("msgspec.UnsetType")
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/msgspec.py 0.45.0-1/src/datamodel_code_generator/model/msgspec.py
--- 0.26.4-3/src/datamodel_code_generator/model/msgspec.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/msgspec.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,462 @@
+"""msgspec.Struct model generator.
+
+Generates Python models using msgspec.Struct for high-performance serialization.
+"""
+
+from __future__ import annotations
+
+from functools import lru_cache, wraps
+from typing import TYPE_CHECKING, Any, ClassVar, Optional, TypeVar
+
+from pydantic import Field
+
+from datamodel_code_generator import DatetimeClassType, PythonVersion, PythonVersionMin
+from datamodel_code_generator.imports import (
+    IMPORT_DATE,
+    IMPORT_DATETIME,
+    IMPORT_TIME,
+    IMPORT_TIMEDELTA,
+    IMPORT_UNION,
+    Import,
+)
+from datamodel_code_generator.model import DataModel, DataModelFieldBase
+from datamodel_code_generator.model.base import UNDEFINED
+from datamodel_code_generator.model.imports import (
+    IMPORT_MSGSPEC_CONVERT,
+    IMPORT_MSGSPEC_FIELD,
+    IMPORT_MSGSPEC_META,
+    IMPORT_MSGSPEC_UNSET,
+    IMPORT_MSGSPEC_UNSETTYPE,
+)
+from datamodel_code_generator.model.pydantic.base_model import (
+    Constraints as _Constraints,
+)
+from datamodel_code_generator.model.type_alias import TypeAliasBase
+from datamodel_code_generator.model.types import DataTypeManager as _DataTypeManager
+from datamodel_code_generator.model.types import type_map_factory
+from datamodel_code_generator.types import (
+    NONE,
+    OPTIONAL_PREFIX,
+    UNION_DELIMITER,
+    UNION_OPERATOR_DELIMITER,
+    UNION_PREFIX,
+    DataType,
+    StrictTypes,
+    Types,
+    _remove_none_from_union,
+    chain_as_tuple,
+)
+
+UNSET_TYPE = "UnsetType"
+
+
+class _UNSET:
+    def __str__(self) -> str:
+        return "UNSET"
+
+    __repr__ = __str__
+
+
+UNSET = _UNSET()
+
+
+if TYPE_CHECKING:
+    from collections import defaultdict
+    from collections.abc import Sequence
+    from pathlib import Path
+
+    from datamodel_code_generator.reference import Reference
+
+
+def _has_field_assignment(field: DataModelFieldBase) -> bool:
+    return not (field.required or (field.represented_default == "None" and field.strip_default_none))
+
+
+DataModelFieldBaseT = TypeVar("DataModelFieldBaseT", bound=DataModelFieldBase)
+
+
+def import_extender(cls: type[DataModelFieldBaseT]) -> type[DataModelFieldBaseT]:
+    """Extend imports property with msgspec-specific imports."""
+    original_imports: property = cls.imports
+
+    @wraps(original_imports.fget)  # pyright: ignore[reportArgumentType]
+    def new_imports(self: DataModelFieldBaseT) -> tuple[Import, ...]:
+        if self.extras.get("is_classvar"):
+            return ()
+        extra_imports = []
+        field = self.field
+        # TODO: Improve field detection
+        if field and field.startswith("field("):
+            extra_imports.append(IMPORT_MSGSPEC_FIELD)
+        if self.field and "lambda: convert" in self.field:
+            extra_imports.append(IMPORT_MSGSPEC_CONVERT)
+        if isinstance(self, DataModelField) and self.needs_meta_import:
+            extra_imports.append(IMPORT_MSGSPEC_META)
+        if not self.required and not self.nullable:
+            extra_imports.append(IMPORT_MSGSPEC_UNSETTYPE)
+            if not self.data_type.use_union_operator:
+                extra_imports.append(IMPORT_UNION)
+            if self.default is None or self.default is UNDEFINED:
+                extra_imports.append(IMPORT_MSGSPEC_UNSET)
+        return chain_as_tuple(original_imports.fget(self), extra_imports)  # pyright: ignore[reportOptionalCall]
+
+    cls.imports = property(new_imports)  # pyright: ignore[reportAttributeAccessIssue]
+    return cls
+
+
+class Struct(DataModel):
+    """DataModel implementation for msgspec.Struct."""
+
+    TEMPLATE_FILE_PATH: ClassVar[str] = "msgspec.jinja2"
+    BASE_CLASS: ClassVar[str] = "msgspec.Struct"
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = ()
+
+    def __init__(  # noqa: PLR0913
+        self,
+        *,
+        reference: Reference,
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
+        methods: list[str] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
+        default: Any = UNDEFINED,
+        nullable: bool = False,
+        keyword_only: bool = False,
+        treat_dot_as_module: bool = False,
+    ) -> None:
+        """Initialize msgspec Struct with fields sorted by field assignment requirement."""
+        super().__init__(
+            reference=reference,
+            fields=sorted(fields, key=_has_field_assignment),
+            decorators=decorators,
+            base_classes=base_classes,
+            custom_base_class=custom_base_class,
+            custom_template_dir=custom_template_dir,
+            extra_template_data=extra_template_data,
+            methods=methods,
+            path=path,
+            description=description,
+            default=default,
+            nullable=nullable,
+            keyword_only=keyword_only,
+            treat_dot_as_module=treat_dot_as_module,
+        )
+        self.extra_template_data.setdefault("base_class_kwargs", {})
+        if self.keyword_only:
+            self.add_base_class_kwarg("kw_only", "True")
+
+    def add_base_class_kwarg(self, name: str, value: str) -> None:
+        """Add keyword argument to base class constructor."""
+        self.extra_template_data["base_class_kwargs"][name] = value
+
+
+class Constraints(_Constraints):
+    """Constraint model for msgspec fields."""
+
+    # To override existing pattern alias
+    regex: Optional[str] = Field(None, alias="regex")  # noqa: UP045
+    pattern: Optional[str] = Field(None, alias="pattern")  # noqa: UP045
+
+
+@lru_cache
+def get_neither_required_nor_nullable_type(type_: str, use_union_operator: bool) -> str:  # noqa: FBT001
+    """Get type hint for fields that are neither required nor nullable, using UnsetType."""
+    type_ = _remove_none_from_union(type_, use_union_operator=use_union_operator)
+    if type_.startswith(OPTIONAL_PREFIX):  # pragma: no cover
+        type_ = type_[len(OPTIONAL_PREFIX) : -1]
+
+    if not type_ or type_ == NONE:
+        return UNSET_TYPE
+    if use_union_operator:
+        return UNION_OPERATOR_DELIMITER.join((type_, UNSET_TYPE))
+    if type_.startswith(UNION_PREFIX):
+        return f"{type_[:-1]}{UNION_DELIMITER}{UNSET_TYPE}]"
+    return f"{UNION_PREFIX}{type_}{UNION_DELIMITER}{UNSET_TYPE}]"
+
+
+@lru_cache
+def _add_unset_type(type_: str, use_union_operator: bool) -> str:  # noqa: FBT001
+    """Add UnsetType to a type hint without removing None."""
+    if use_union_operator:
+        return f"{type_}{UNION_OPERATOR_DELIMITER}{UNSET_TYPE}"
+    if type_.startswith(UNION_PREFIX):
+        return f"{type_[:-1]}{UNION_DELIMITER}{UNSET_TYPE}]"
+    if type_.startswith(OPTIONAL_PREFIX):  # pragma: no cover
+        inner_type = type_[len(OPTIONAL_PREFIX) : -1]
+        return f"{UNION_PREFIX}{inner_type}{UNION_DELIMITER}{NONE}{UNION_DELIMITER}{UNSET_TYPE}]"
+    return f"{UNION_PREFIX}{type_}{UNION_DELIMITER}{UNSET_TYPE}]"
+
+
+@import_extender
+class DataModelField(DataModelFieldBase):
+    """Field implementation for msgspec Struct models."""
+
+    _FIELD_KEYS: ClassVar[set[str]] = {
+        "default",
+        "default_factory",
+    }
+    _META_FIELD_KEYS: ClassVar[set[str]] = {
+        "title",
+        "description",
+        "gt",
+        "ge",
+        "lt",
+        "le",
+        "multiple_of",
+        # 'min_items', # not supported by msgspec
+        # 'max_items', # not supported by msgspec
+        "min_length",
+        "max_length",
+        "pattern",
+        "examples",
+        # 'unique_items', # not supported by msgspec
+    }
+    _PARSE_METHOD = "convert"
+    _COMPARE_EXPRESSIONS: ClassVar[set[str]] = {"gt", "ge", "lt", "le", "multiple_of"}
+    constraints: Optional[Constraints] = None  # noqa: UP045
+
+    def process_const(self) -> None:
+        """Process const field constraint."""
+        if "const" not in self.extras:
+            return
+        self.const = True
+        self.nullable = False
+        const = self.extras["const"]
+        if self.data_type.type == "str" and isinstance(const, str):  # pragma: no cover # Literal supports only str
+            self.replace_data_type(self.data_type.__class__(literals=[const]), clear_old_parent=False)
+
+    def _get_strict_field_constraint_value(self, constraint: str, value: Any) -> Any:
+        """Get constraint value with appropriate numeric type."""
+        if value is None or constraint not in self._COMPARE_EXPRESSIONS:
+            return value
+
+        if any(data_type.type == "float" for data_type in self.data_type.all_data_types):
+            return float(value)
+        return int(value)
+
+    @property
+    def field(self) -> str | None:
+        """For backwards compatibility."""
+        result = str(self)
+        if not result:
+            return None
+        return result
+
+    def __str__(self) -> str:
+        """Generate field() call or default value representation."""
+        data: dict[str, Any] = {k: v for k, v in self.extras.items() if k in self._FIELD_KEYS}
+        if self.alias:
+            data["name"] = self.alias
+
+        if self.default is not UNDEFINED and self.default is not None:
+            data["default"] = self.default
+        elif self._not_required and "default_factory" not in data:
+            data["default"] = None if self.nullable else UNSET
+
+        if self.required:
+            data = {
+                k: v
+                for k, v in data.items()
+                if k
+                not in {
+                    "default",
+                    "default_factory",
+                }
+            }
+        elif self.default and "default_factory" not in data:
+            default_factory = self._get_default_as_struct_model()
+            if default_factory is not None:
+                data.pop("default")
+                data["default_factory"] = default_factory
+
+        if "default" in data and isinstance(data["default"], (list, dict, set)) and "default_factory" not in data:
+            default_value = data.pop("default")
+            if default_value:
+                from datamodel_code_generator.model.base import repr_set_sorted  # noqa: PLC0415
+
+                default_repr = repr_set_sorted(default_value) if isinstance(default_value, set) else repr(default_value)
+                data["default_factory"] = f"lambda: {default_repr}"
+            else:
+                data["default_factory"] = type(default_value).__name__
+
+        if not data:
+            return ""
+
+        if len(data) == 1 and "default" in data:
+            return repr(data["default"])
+
+        kwargs = [f"{k}={v if k == 'default_factory' else repr(v)}" for k, v in data.items()]
+        return f"field({', '.join(kwargs)})"
+
+    @property
+    def type_hint(self) -> str:
+        """Return the type hint, using UnsetType for non-required non-nullable fields."""
+        type_hint = super().type_hint
+        if self._not_required and not self.nullable:
+            if self.data_type.is_optional:
+                return _add_unset_type(type_hint, self.data_type.use_union_operator)
+            return get_neither_required_nor_nullable_type(type_hint, self.data_type.use_union_operator)
+        return type_hint
+
+    @property
+    def _not_required(self) -> bool:
+        return not self.required and isinstance(self.parent, Struct)
+
+    @property
+    def fall_back_to_nullable(self) -> bool:
+        """Return whether to fall back to nullable type instead of UnsetType."""
+        return not self._not_required
+
+    def _get_meta_string(self) -> str | None:
+        """Compute Meta(...) string if there are any meta constraints."""
+        data: dict[str, Any] = {k: v for k, v in self.extras.items() if k in self._META_FIELD_KEYS}
+        has_type_constraints = self.data_type.kwargs is not None and len(self.data_type.kwargs) > 0
+        if (
+            self.constraints is not None
+            and not self.self_reference()
+            and not (self.data_type.strict and has_type_constraints)
+        ):
+            data = {
+                **data,
+                **{
+                    k: self._get_strict_field_constraint_value(k, v)
+                    for k, v in self.constraints.dict().items()
+                    if k in self._META_FIELD_KEYS
+                },
+            }
+
+        meta_arguments = sorted(f"{k}={v!r}" for k, v in data.items() if v is not None)
+        return f"Meta({', '.join(meta_arguments)})" if meta_arguments else None
+
+    @property
+    def annotated(self) -> str | None:  # noqa: PLR0911
+        """Get Annotated type hint with Meta constraints.
+
+        For ClassVar fields (discriminator tag_field), ClassVar is required
+        regardless of use_annotated setting.
+        """
+        if self.extras.get("is_classvar"):
+            meta = self._get_meta_string()
+            if self.use_annotated and meta:
+                return f"ClassVar[Annotated[{self.type_hint}, {meta}]]"
+            return f"ClassVar[{self.type_hint}]"
+
+        if not self.use_annotated:  # pragma: no cover
+            return None
+
+        meta = self._get_meta_string()
+        if not meta:
+            return None
+
+        if self.required:
+            return f"Annotated[{self.type_hint}, {meta}]"
+
+        type_hint = self.data_type.type_hint
+        annotated_type = f"Annotated[{type_hint}, {meta}]"
+        if self.nullable:  # pragma: no cover
+            return annotated_type
+        if self.data_type.is_optional:  # pragma: no cover
+            return _add_unset_type(annotated_type, self.data_type.use_union_operator)
+        return get_neither_required_nor_nullable_type(annotated_type, self.data_type.use_union_operator)
+
+    @property
+    def needs_annotated_import(self) -> bool:
+        """Check if this field requires the Annotated import.
+
+        ClassVar fields with Meta need Annotated only when use_annotated is True.
+        ClassVar fields without Meta don't need Annotated.
+        """
+        if not self.annotated:
+            return False
+        if self.extras.get("is_classvar"):
+            return self.use_annotated and self._get_meta_string() is not None
+        return True
+
+    @property
+    def needs_meta_import(self) -> bool:
+        """Check if this field requires the Meta import."""
+        return self._get_meta_string() is not None
+
+    def _get_default_as_struct_model(self) -> str | None:
+        """Convert default value to Struct model using msgspec convert."""
+        for data_type in self.data_type.data_types or (self.data_type,):
+            # TODO: Check nested data_types
+            if data_type.is_dict:
+                # TODO: Parse dict model for default
+                continue  # pragma: no cover
+            if data_type.is_list and len(data_type.data_types) == 1:
+                data_type_child = data_type.data_types[0]
+                if (  # pragma: no cover
+                    data_type_child.reference
+                    and (isinstance(data_type_child.reference.source, (Struct, TypeAliasBase)))
+                    and isinstance(self.default, list)
+                ):
+                    return (
+                        f"lambda: {self._PARSE_METHOD}({self.default!r},  "
+                        f"type=list[{data_type_child.alias or data_type_child.reference.source.class_name}])"
+                    )
+            elif data_type.reference and isinstance(data_type.reference.source, Struct):
+                if self.data_type.is_union:
+                    if not isinstance(self.default, (dict, list)):
+                        continue
+                    if isinstance(self.default, dict) and any(dt.is_dict for dt in self.data_type.data_types):
+                        continue
+                return (
+                    f"lambda: {self._PARSE_METHOD}({self.default!r},  "
+                    f"type={data_type.alias or data_type.reference.source.class_name})"
+                )
+        return None
+
+
+class DataTypeManager(_DataTypeManager):
+    """Type manager for msgspec Struct models."""
+
+    def __init__(  # noqa: PLR0913, PLR0917
+        self,
+        python_version: PythonVersion = PythonVersionMin,
+        use_standard_collections: bool = False,  # noqa: FBT001, FBT002
+        use_generic_container_types: bool = False,  # noqa: FBT001, FBT002
+        strict_types: Sequence[StrictTypes] | None = None,
+        use_non_positive_negative_number_constrained_types: bool = False,  # noqa: FBT001, FBT002
+        use_decimal_for_multiple_of: bool = False,  # noqa: FBT001, FBT002
+        use_union_operator: bool = False,  # noqa: FBT001, FBT002
+        use_pendulum: bool = False,  # noqa: FBT001, FBT002
+        target_datetime_class: DatetimeClassType | None = None,
+        treat_dot_as_module: bool = False,  # noqa: FBT001, FBT002
+        use_serialize_as_any: bool = False,  # noqa: FBT001, FBT002
+    ) -> None:
+        """Initialize type manager with optional datetime type mapping."""
+        super().__init__(
+            python_version,
+            use_standard_collections,
+            use_generic_container_types,
+            strict_types,
+            use_non_positive_negative_number_constrained_types,
+            use_decimal_for_multiple_of,
+            use_union_operator,
+            use_pendulum,
+            target_datetime_class,
+            treat_dot_as_module,
+            use_serialize_as_any,
+        )
+
+        datetime_map = (
+            {
+                Types.time: self.data_type.from_import(IMPORT_TIME),
+                Types.date: self.data_type.from_import(IMPORT_DATE),
+                Types.date_time: self.data_type.from_import(IMPORT_DATETIME),
+                Types.timedelta: self.data_type.from_import(IMPORT_TIMEDELTA),
+            }
+            if target_datetime_class is DatetimeClassType.Datetime
+            else {}
+        )
+
+        self.type_map: dict[Types, DataType] = {
+            **type_map_factory(self.data_type),
+            **datetime_map,
+        }
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/pydantic/__init__.py 0.45.0-1/src/datamodel_code_generator/model/pydantic/__init__.py
--- 0.26.4-3/src/datamodel_code_generator/model/pydantic/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/pydantic/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,48 @@
+"""Pydantic v1 model generator.
+
+Provides BaseModel, CustomRootType, and DataModelField for generating
+Pydantic v1 compatible data models.
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Optional
+
+from pydantic import BaseModel as _BaseModel
+
+from .base_model import BaseModel, DataModelField
+from .custom_root_type import CustomRootType
+from .dataclass import DataClass
+from .types import DataTypeManager
+
+if TYPE_CHECKING:
+    from collections.abc import Iterable
+
+
+def dump_resolve_reference_action(class_names: Iterable[str]) -> str:
+    """Generate update_forward_refs() calls for Pydantic v1 models."""
+    return "\n".join(f"{class_name}.update_forward_refs()" for class_name in class_names)
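+# Illustrative only (not upstream code): dump_resolve_reference_action(["Pet", "Owner"])
+# returns 'Pet.update_forward_refs()\nOwner.update_forward_refs()', which the generator
+# appends to the module so forward references resolve under Pydantic v1.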
+
+
+class Config(_BaseModel):
+    """Pydantic v1 model Config options."""
+
+    extra: Optional[str] = None  # noqa: UP045
+    title: Optional[str] = None  # noqa: UP045
+    allow_population_by_field_name: Optional[bool] = None  # noqa: UP045
+    allow_extra_fields: Optional[bool] = None  # noqa: UP045
+    extra_fields: Optional[str] = None  # noqa: UP045
+    allow_mutation: Optional[bool] = None  # noqa: UP045
+    arbitrary_types_allowed: Optional[bool] = None  # noqa: UP045
+    orm_mode: Optional[bool] = None  # noqa: UP045
+    validate_assignment: Optional[bool] = None  # noqa: UP045
+
+
+__all__ = [
+    "BaseModel",
+    "CustomRootType",
+    "DataClass",
+    "DataModelField",
+    "DataTypeManager",
+    "dump_resolve_reference_action",
+]
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/pydantic/base_model.py 0.45.0-1/src/datamodel_code_generator/model/pydantic/base_model.py
--- 0.26.4-3/src/datamodel_code_generator/model/pydantic/base_model.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/pydantic/base_model.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,377 @@
+"""Pydantic v1 BaseModel implementation.
+
+Provides Constraints, DataModelField, and BaseModel for Pydantic v1.
+"""
+
+from __future__ import annotations
+
+from abc import ABC
+from functools import cached_property
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, ClassVar, Optional
+
+from pydantic import Field
+
+from datamodel_code_generator.model import (
+    ConstraintsBase,
+    DataModel,
+    DataModelFieldBase,
+)
+from datamodel_code_generator.model.base import UNDEFINED
+from datamodel_code_generator.model.pydantic.imports import (
+    IMPORT_ANYURL,
+    IMPORT_EXTRA,
+    IMPORT_FIELD,
+)
+from datamodel_code_generator.types import STANDARD_LIST, UnionIntFloat, chain_as_tuple
+
+if TYPE_CHECKING:
+    from collections import defaultdict
+
+    from datamodel_code_generator.imports import Import
+    from datamodel_code_generator.reference import Reference
+
+
+class Constraints(ConstraintsBase):
+    """Pydantic v1 field constraints (gt, ge, lt, le, regex, etc.)."""
+
+    gt: Optional[UnionIntFloat] = Field(None, alias="exclusiveMinimum")  # noqa: UP045
+    ge: Optional[UnionIntFloat] = Field(None, alias="minimum")  # noqa: UP045
+    lt: Optional[UnionIntFloat] = Field(None, alias="exclusiveMaximum")  # noqa: UP045
+    le: Optional[UnionIntFloat] = Field(None, alias="maximum")  # noqa: UP045
+    multiple_of: Optional[float] = Field(None, alias="multipleOf")  # noqa: UP045
+    min_items: Optional[int] = Field(None, alias="minItems")  # noqa: UP045
+    max_items: Optional[int] = Field(None, alias="maxItems")  # noqa: UP045
+    min_length: Optional[int] = Field(None, alias="minLength")  # noqa: UP045
+    max_length: Optional[int] = Field(None, alias="maxLength")  # noqa: UP045
+    regex: Optional[str] = Field(None, alias="pattern")  # noqa: UP045
+
+
+class DataModelField(DataModelFieldBase):
+    """Field implementation for Pydantic v1 models."""
+
+    _EXCLUDE_FIELD_KEYS: ClassVar[set[str]] = {
+        "alias",
+        "default",
+        "const",
+        "gt",
+        "ge",
+        "lt",
+        "le",
+        "multiple_of",
+        "min_items",
+        "max_items",
+        "min_length",
+        "max_length",
+        "regex",
+    }
+    _COMPARE_EXPRESSIONS: ClassVar[set[str]] = {"gt", "ge", "lt", "le"}
+    constraints: Optional[Constraints] = None  # noqa: UP045
+    _PARSE_METHOD: ClassVar[str] = "parse_obj"
+
+    @property
+    def has_default_factory_in_field(self) -> bool:
+        """Check if this field has a default_factory in Field() including computed ones."""
+        return "default_factory" in self.extras or self.__dict__.get("_computed_default_factory") is not None
+
+    @property
+    def method(self) -> str | None:
+        """Get the validation method name."""
+        return self.validator
+
+    @property
+    def validator(self) -> str | None:
+        """Get the validator name."""
+        return None
+        # TODO refactor this method for other validation logic
+
+    @property
+    def field(self) -> str | None:
+        """Render the Field() expression for this field (kept for backwards compatibility)."""
+        result = str(self)
+        if (
+            self.use_default_kwarg
+            and not result.startswith("Field(...")
+            and not result.startswith("Field(default_factory=")
+        ):
+            # Use `default=` for fields that have a default value so that type
+            # checkers using @dataclass_transform can infer the field as
+            # optional in __init__.
+            result = result.replace("Field(", "Field(default=")
+        if not result:
+            return None
+        return result
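+    # Illustration (assumption, not upstream): with use_default_kwarg enabled,
+    # a rendered Field(5, alias=...) becomes Field(default=5, alias=...) so
+    # @dataclass_transform-aware type checkers infer the field as optional.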
+
+    def _get_strict_field_constraint_value(self, constraint: str, value: Any) -> Any:
+        if value is None or constraint not in self._COMPARE_EXPRESSIONS:
+            return value
+
+        is_float_type = any(
+            data_type.type == "float"
+            or (data_type.strict and data_type.import_ and "Float" in data_type.import_.import_)
+            for data_type in self.data_type.all_data_types
+        )
+        if is_float_type:
+            return float(value)
+        str_value = str(value)
+        if "e" in str_value.lower():  # pragma: no cover
+            # Scientific notation like 1e-08 - keep as float
+            return float(value)
+        if isinstance(value, int) and not isinstance(value, bool):  # pragma: no branch
+            return value
+        return int(value)
+
+    def _get_default_as_pydantic_model(self) -> str | None:
+        for data_type in self.data_type.data_types or (self.data_type,):
+            # TODO: Check nested data_types
+            if data_type.is_dict:
+                # TODO: Parse dict model for default
+                continue
+            if data_type.is_list and len(data_type.data_types) == 1:
+                data_type_child = data_type.data_types[0]
+                if (
+                    data_type_child.reference
+                    and isinstance(data_type_child.reference.source, BaseModelBase)
+                    and isinstance(self.default, list)
+                ):  # pragma: no cover
+                    if not self.default:
+                        return STANDARD_LIST
+                    return (
+                        f"lambda :[{data_type_child.alias or data_type_child.reference.source.class_name}."
+                        f"{self._PARSE_METHOD}(v) for v in {self.default!r}]"
+                    )
+            elif data_type.reference and isinstance(data_type.reference.source, BaseModelBase):
+                if self.data_type.is_union:
+                    if not isinstance(self.default, (dict, list)):
+                        continue
+                    if isinstance(self.default, dict) and any(dt.is_dict for dt in self.data_type.data_types):
+                        continue
+                return (
+                    f"lambda :{data_type.alias or data_type.reference.source.class_name}."
+                    f"{self._PARSE_METHOD}({self.default!r})"
+                )
+        return None
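+    # Illustrative output: for a field typed as a model Pet with default
+    # {'name': 'dog'}, this returns "lambda :Pet.parse_obj({'name': 'dog'})",
+    # emitted as Field(default_factory=...) in the generated code.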
+
+    def _process_data_in_str(self, data: dict[str, Any]) -> None:
+        if self.const:
+            data["const"] = True
+
+        if self.use_frozen_field and self.read_only:
+            data["allow_mutation"] = False
+
+    def _process_annotated_field_arguments(self, field_arguments: list[str]) -> list[str]:  # noqa: PLR6301
+        return field_arguments
+
+    def __str__(self) -> str:  # noqa: PLR0912
+        """Return Field() call with all constraints and metadata."""
+        data: dict[str, Any] = {k: v for k, v in self.extras.items() if k not in self._EXCLUDE_FIELD_KEYS}
+        if self.alias:
+            data["alias"] = self.alias
+        has_type_constraints = self.data_type.kwargs is not None and len(self.data_type.kwargs) > 0
+        if (
+            self.constraints is not None
+            and not self.self_reference()
+            and not (self.data_type.strict and has_type_constraints)
+        ):
+            data = {
+                **data,
+                **(
+                    {}
+                    if any(d.import_ == IMPORT_ANYURL for d in self.data_type.all_data_types)
+                    else {
+                        k: self._get_strict_field_constraint_value(k, v)
+                        for k, v in self.constraints.dict(exclude_unset=True).items()
+                    }
+                ),
+            }
+
+        if self.use_field_description:
+            data.pop("description", None)  # Description is part of field docstring
+
+        self._process_data_in_str(data)
+
+        discriminator = data.pop("discriminator", None)
+        if discriminator:
+            if isinstance(discriminator, str):
+                data["discriminator"] = discriminator
+            elif isinstance(discriminator, dict):  # pragma: no cover
+                data["discriminator"] = discriminator["propertyName"]
+
+        if self.required:
+            default_factory = None
+        elif self.default is not UNDEFINED and self.default is not None and "default_factory" not in data:
+            default_factory = self._get_default_as_pydantic_model()
+        else:
+            default_factory = data.pop("default_factory", None)
+
+        self.__dict__["_computed_default_factory"] = default_factory
+
+        field_arguments = sorted(f"{k}={v!r}" for k, v in data.items() if v is not None)
+
+        if not field_arguments and not default_factory:
+            if self.nullable and self.required:
+                return "Field(...)"  # Field() is for mypy
+            return ""
+
+        if default_factory:
+            field_arguments = [f"default_factory={default_factory}", *field_arguments]
+
+        if self.use_annotated:
+            field_arguments = self._process_annotated_field_arguments(field_arguments)
+        elif self.required:
+            field_arguments = ["...", *field_arguments]
+        elif not default_factory:
+            from datamodel_code_generator.model.base import repr_set_sorted  # noqa: PLC0415
+
+            default_repr = repr_set_sorted(self.default) if isinstance(self.default, set) else repr(self.default)
+            field_arguments = [default_repr, *field_arguments]
+
+        return f"Field({', '.join(field_arguments)})"
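+    # Sketch of rendered output (illustrative): a required aliased field becomes
+    # Field(..., alias='fooBar'); an optional field with default 0 and a
+    # description becomes Field(0, description='...').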
+
+    @property
+    def annotated(self) -> str | None:
+        """Get the Annotated type hint if use_annotated is enabled."""
+        if not self.use_annotated or not str(self):
+            return None
+        return f"Annotated[{self.type_hint}, {self!s}]"
+
+    @property
+    def imports(self) -> tuple[Import, ...]:
+        """Get all required imports including Field if needed."""
+        if self.field:
+            return chain_as_tuple(super().imports, (IMPORT_FIELD,))
+        return super().imports
+
+
+class BaseModelBase(DataModel, ABC):
+    """Abstract base class for Pydantic BaseModel implementations."""
+
+    def __init__(  # noqa: PLR0913
+        self,
+        *,
+        reference: Reference,
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, Any] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
+        default: Any = UNDEFINED,
+        nullable: bool = False,
+        keyword_only: bool = False,
+        treat_dot_as_module: bool = False,
+    ) -> None:
+        """Initialize the BaseModel with fields and configuration."""
+        methods: list[str] = [field.method for field in fields if field.method]
+
+        super().__init__(
+            fields=fields,
+            reference=reference,
+            decorators=decorators,
+            base_classes=base_classes,
+            custom_base_class=custom_base_class,
+            custom_template_dir=custom_template_dir,
+            extra_template_data=extra_template_data,
+            methods=methods,
+            path=path,
+            description=description,
+            default=default,
+            nullable=nullable,
+            keyword_only=keyword_only,
+            treat_dot_as_module=treat_dot_as_module,
+        )
+
+    @cached_property
+    def template_file_path(self) -> Path:
+        """Get the template file path with backward compatibility support."""
+        # This property is kept for backward compatibility.
+        # The current version supports '{custom_template_dir}/BaseModel.jinja2',
+        # but a future version will only support '{custom_template_dir}/pydantic/BaseModel.jinja2'
+        if self._custom_template_dir is not None:
+            custom_template_file_path = self._custom_template_dir / Path(self.TEMPLATE_FILE_PATH).name
+            if custom_template_file_path.exists():
+                return custom_template_file_path
+        return super().template_file_path
+
+
+class BaseModel(BaseModelBase):
+    """Pydantic v1 BaseModel implementation."""
+
+    TEMPLATE_FILE_PATH: ClassVar[str] = "pydantic/BaseModel.jinja2"
+    BASE_CLASS: ClassVar[str] = "pydantic.BaseModel"
+
+    def __init__(  # noqa: PLR0912, PLR0913
+        self,
+        *,
+        reference: Reference,
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, Any] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
+        default: Any = UNDEFINED,
+        nullable: bool = False,
+        keyword_only: bool = False,
+        treat_dot_as_module: bool = False,
+    ) -> None:
+        """Initialize the BaseModel with Config and extra fields support."""
+        super().__init__(
+            reference=reference,
+            fields=fields,
+            decorators=decorators,
+            base_classes=base_classes,
+            custom_base_class=custom_base_class,
+            custom_template_dir=custom_template_dir,
+            extra_template_data=extra_template_data,
+            path=path,
+            description=description,
+            default=default,
+            nullable=nullable,
+            keyword_only=keyword_only,
+            treat_dot_as_module=treat_dot_as_module,
+        )
+        config_parameters: dict[str, Any] = {}
+
+        additional_properties = self.extra_template_data.get("additionalProperties")
+        allow_extra_fields = self.extra_template_data.get("allow_extra_fields")
+        extra_fields = self.extra_template_data.get("extra_fields")
+
+        if allow_extra_fields or extra_fields or additional_properties is not None:
+            self._additional_imports.append(IMPORT_EXTRA)
+
+        if allow_extra_fields:
+            config_parameters["extra"] = "Extra.allow"
+        elif extra_fields:
+            config_parameters["extra"] = f"Extra.{extra_fields}"
+        elif additional_properties is True:
+            config_parameters["extra"] = "Extra.allow"
+        elif additional_properties is False:
+            config_parameters["extra"] = "Extra.forbid"
+
+        for config_attribute in "allow_population_by_field_name", "allow_mutation":
+            if config_attribute in self.extra_template_data:
+                config_parameters[config_attribute] = self.extra_template_data[config_attribute]
+
+        if "validate_assignment" not in config_parameters and any(
+            field.use_frozen_field and field.read_only for field in self.fields
+        ):
+            config_parameters["validate_assignment"] = True
+
+        for data_type in self.all_data_types:
+            if data_type.is_custom_type:  # pragma: no cover
+                config_parameters["arbitrary_types_allowed"] = True
+                break
+
+        if isinstance(self.extra_template_data.get("config"), dict):
+            for key, value in self.extra_template_data["config"].items():
+                config_parameters[key] = value  # noqa: PERF403
+
+        if config_parameters:
+            from datamodel_code_generator.model.pydantic import Config  # noqa: PLC0415
+
+            self.extra_template_data["config"] = Config.parse_obj(config_parameters)  # pyright: ignore[reportArgumentType]
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/pydantic/custom_root_type.py 0.45.0-1/src/datamodel_code_generator/model/pydantic/custom_root_type.py
--- 0.26.4-3/src/datamodel_code_generator/model/pydantic/custom_root_type.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/pydantic/custom_root_type.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+"""Pydantic v1 custom root type model.
+
+Generates models with __root__ field for wrapping single types.
+"""
+
+from __future__ import annotations
+
+from typing import ClassVar
+
+from datamodel_code_generator.model.pydantic.base_model import BaseModel
+
+
+class CustomRootType(BaseModel):
+    """DataModel for Pydantic v1 custom root types (__root__ field)."""
+
+    TEMPLATE_FILE_PATH: ClassVar[str] = "pydantic/BaseModel_root.jinja2"
+    BASE_CLASS: ClassVar[str] = "pydantic.BaseModel"
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/pydantic/dataclass.py 0.45.0-1/src/datamodel_code_generator/model/pydantic/dataclass.py
--- 0.26.4-3/src/datamodel_code_generator/model/pydantic/dataclass.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/pydantic/dataclass.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+"""Pydantic v1 dataclass model.
+
+Generates pydantic.dataclasses.dataclass decorated classes.
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, ClassVar
+
+from datamodel_code_generator.model import DataModel
+from datamodel_code_generator.model.pydantic.imports import IMPORT_DATACLASS
+
+if TYPE_CHECKING:
+    from datamodel_code_generator.imports import Import
+
+
+class DataClass(DataModel):
+    """DataModel for Pydantic v1 dataclasses."""
+
+    TEMPLATE_FILE_PATH: ClassVar[str] = "pydantic/dataclass.jinja2"
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_DATACLASS,)
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/pydantic/imports.py 0.45.0-1/src/datamodel_code_generator/model/pydantic/imports.py
--- 0.26.4-3/src/datamodel_code_generator/model/pydantic/imports.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/pydantic/imports.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,42 @@
+"""Import definitions for Pydantic v1 types.
+
+Provides pre-defined Import objects for Pydantic v1 types (constr, AnyUrl, etc.).
+"""
+
+from __future__ import annotations
+
+from datamodel_code_generator.imports import Import
+
+IMPORT_CONSTR = Import.from_full_path("pydantic.constr")
+IMPORT_CONINT = Import.from_full_path("pydantic.conint")
+IMPORT_CONFLOAT = Import.from_full_path("pydantic.confloat")
+IMPORT_CONDECIMAL = Import.from_full_path("pydantic.condecimal")
+IMPORT_CONBYTES = Import.from_full_path("pydantic.conbytes")
+IMPORT_POSITIVE_INT = Import.from_full_path("pydantic.PositiveInt")
+IMPORT_NEGATIVE_INT = Import.from_full_path("pydantic.NegativeInt")
+IMPORT_NON_POSITIVE_INT = Import.from_full_path("pydantic.NonPositiveInt")
+IMPORT_NON_NEGATIVE_INT = Import.from_full_path("pydantic.NonNegativeInt")
+IMPORT_POSITIVE_FLOAT = Import.from_full_path("pydantic.PositiveFloat")
+IMPORT_NEGATIVE_FLOAT = Import.from_full_path("pydantic.NegativeFloat")
+IMPORT_NON_NEGATIVE_FLOAT = Import.from_full_path("pydantic.NonNegativeFloat")
+IMPORT_NON_POSITIVE_FLOAT = Import.from_full_path("pydantic.NonPositiveFloat")
+IMPORT_SECRET_STR = Import.from_full_path("pydantic.SecretStr")
+IMPORT_EMAIL_STR = Import.from_full_path("pydantic.EmailStr")
+IMPORT_UUID1 = Import.from_full_path("pydantic.UUID1")
+IMPORT_UUID2 = Import.from_full_path("pydantic.UUID2")
+IMPORT_UUID3 = Import.from_full_path("pydantic.UUID3")
+IMPORT_UUID4 = Import.from_full_path("pydantic.UUID4")
+IMPORT_UUID5 = Import.from_full_path("pydantic.UUID5")
+IMPORT_ANYURL = Import.from_full_path("pydantic.AnyUrl")
+IMPORT_IPV4ADDRESS = Import.from_full_path("ipaddress.IPv4Address")
+IMPORT_IPV6ADDRESS = Import.from_full_path("ipaddress.IPv6Address")
+IMPORT_IPV4NETWORKS = Import.from_full_path("ipaddress.IPv4Network")
+IMPORT_IPV6NETWORKS = Import.from_full_path("ipaddress.IPv6Network")
+IMPORT_EXTRA = Import.from_full_path("pydantic.Extra")
+IMPORT_FIELD = Import.from_full_path("pydantic.Field")
+IMPORT_STRICT_INT = Import.from_full_path("pydantic.StrictInt")
+IMPORT_STRICT_FLOAT = Import.from_full_path("pydantic.StrictFloat")
+IMPORT_STRICT_STR = Import.from_full_path("pydantic.StrictStr")
+IMPORT_STRICT_BOOL = Import.from_full_path("pydantic.StrictBool")
+IMPORT_STRICT_BYTES = Import.from_full_path("pydantic.StrictBytes")
+IMPORT_DATACLASS = Import.from_full_path("pydantic.dataclasses.dataclass")
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/pydantic/types.py 0.45.0-1/src/datamodel_code_generator/model/pydantic/types.py
--- 0.26.4-3/src/datamodel_code_generator/model/pydantic/types.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/pydantic/types.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,353 @@
+"""Pydantic v1 type manager.
+
+Maps schema types to Pydantic v1 specific types (constr, conint, AnyUrl, etc.).
+"""
+
+from __future__ import annotations
+
+from decimal import Decimal
+from typing import TYPE_CHECKING, Any, ClassVar
+
+from datamodel_code_generator.format import DatetimeClassType, PythonVersion, PythonVersionMin
+from datamodel_code_generator.imports import (
+    IMPORT_ANY,
+    IMPORT_DATE,
+    IMPORT_DATETIME,
+    IMPORT_DECIMAL,
+    IMPORT_PATH,
+    IMPORT_PENDULUM_DATE,
+    IMPORT_PENDULUM_DATETIME,
+    IMPORT_PENDULUM_DURATION,
+    IMPORT_PENDULUM_TIME,
+    IMPORT_TIME,
+    IMPORT_TIMEDELTA,
+    IMPORT_UUID,
+)
+from datamodel_code_generator.model.pydantic.imports import (
+    IMPORT_ANYURL,
+    IMPORT_CONBYTES,
+    IMPORT_CONDECIMAL,
+    IMPORT_CONFLOAT,
+    IMPORT_CONINT,
+    IMPORT_CONSTR,
+    IMPORT_EMAIL_STR,
+    IMPORT_IPV4ADDRESS,
+    IMPORT_IPV4NETWORKS,
+    IMPORT_IPV6ADDRESS,
+    IMPORT_IPV6NETWORKS,
+    IMPORT_NEGATIVE_FLOAT,
+    IMPORT_NEGATIVE_INT,
+    IMPORT_NON_NEGATIVE_FLOAT,
+    IMPORT_NON_NEGATIVE_INT,
+    IMPORT_NON_POSITIVE_FLOAT,
+    IMPORT_NON_POSITIVE_INT,
+    IMPORT_POSITIVE_FLOAT,
+    IMPORT_POSITIVE_INT,
+    IMPORT_SECRET_STR,
+    IMPORT_STRICT_BOOL,
+    IMPORT_STRICT_BYTES,
+    IMPORT_STRICT_FLOAT,
+    IMPORT_STRICT_INT,
+    IMPORT_STRICT_STR,
+    IMPORT_UUID1,
+    IMPORT_UUID2,
+    IMPORT_UUID3,
+    IMPORT_UUID4,
+    IMPORT_UUID5,
+)
+from datamodel_code_generator.types import DataType, StrictTypes, Types, UnionIntFloat
+from datamodel_code_generator.types import DataTypeManager as _DataTypeManager
+
+if TYPE_CHECKING:
+    from collections.abc import Sequence
+
+
+def type_map_factory(
+    data_type: type[DataType],
+    strict_types: Sequence[StrictTypes],
+    pattern_key: str,
+    use_pendulum: bool,  # noqa: FBT001
+) -> dict[Types, DataType]:
+    """Create a mapping of schema types to Pydantic v1 data types."""
+    data_type_int = data_type(type="int")
+    data_type_float = data_type(type="float")
+    data_type_str = data_type(type="str")
+    result = {
+        Types.integer: data_type_int,
+        Types.int32: data_type_int,
+        Types.int64: data_type_int,
+        Types.number: data_type_float,
+        Types.float: data_type_float,
+        Types.double: data_type_float,
+        Types.decimal: data_type.from_import(IMPORT_DECIMAL),
+        Types.time: data_type.from_import(IMPORT_TIME),
+        Types.string: data_type_str,
+        Types.byte: data_type_str,  # base64 encoded string
+        Types.binary: data_type(type="bytes"),
+        Types.date: data_type.from_import(IMPORT_DATE),
+        Types.date_time: data_type.from_import(IMPORT_DATETIME),
+        Types.timedelta: data_type.from_import(IMPORT_TIMEDELTA),
+        Types.path: data_type.from_import(IMPORT_PATH),
+        Types.password: data_type.from_import(IMPORT_SECRET_STR),
+        Types.email: data_type.from_import(IMPORT_EMAIL_STR),
+        Types.uuid: data_type.from_import(IMPORT_UUID),
+        Types.uuid1: data_type.from_import(IMPORT_UUID1),
+        Types.uuid2: data_type.from_import(IMPORT_UUID2),
+        Types.uuid3: data_type.from_import(IMPORT_UUID3),
+        Types.uuid4: data_type.from_import(IMPORT_UUID4),
+        Types.uuid5: data_type.from_import(IMPORT_UUID5),
+        Types.uri: data_type.from_import(IMPORT_ANYURL),
+        Types.hostname: data_type.from_import(
+            IMPORT_CONSTR,
+            strict=StrictTypes.str in strict_types,
+            # https://github.com/horejsek/python-fastjsonschema/blob/61c6997a8348b8df9b22e029ca2ba35ef441fbb8/fastjsonschema/draft04.py#L31
+            kwargs={
+                pattern_key: r"r'^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])\.)*"
+                r"([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]{0,61}[A-Za-z0-9])\Z'",
+                **({"strict": True} if StrictTypes.str in strict_types else {}),
+            },
+        ),
+        Types.ipv4: data_type.from_import(IMPORT_IPV4ADDRESS),
+        Types.ipv6: data_type.from_import(IMPORT_IPV6ADDRESS),
+        Types.ipv4_network: data_type.from_import(IMPORT_IPV4NETWORKS),
+        Types.ipv6_network: data_type.from_import(IMPORT_IPV6NETWORKS),
+        Types.boolean: data_type(type="bool"),
+        Types.object: data_type.from_import(IMPORT_ANY, is_dict=True),
+        Types.null: data_type(type="None"),
+        Types.array: data_type.from_import(IMPORT_ANY, is_list=True),
+        Types.any: data_type.from_import(IMPORT_ANY),
+    }
+    if use_pendulum:
+        result[Types.date] = data_type.from_import(IMPORT_PENDULUM_DATE)
+        result[Types.date_time] = data_type.from_import(IMPORT_PENDULUM_DATETIME)
+        result[Types.time] = data_type.from_import(IMPORT_PENDULUM_TIME)
+        result[Types.timedelta] = data_type.from_import(IMPORT_PENDULUM_DURATION)
+
+    return result
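+# Illustrative: Types.date_time maps to datetime.datetime here, or to
+# pendulum.DateTime when use_pendulum is enabled.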
+
+
+def strict_type_map_factory(data_type: type[DataType]) -> dict[StrictTypes, DataType]:
+    """Create a mapping of strict types to Pydantic v1 strict data types."""
+    return {
+        StrictTypes.int: data_type.from_import(IMPORT_STRICT_INT, strict=True),
+        StrictTypes.float: data_type.from_import(IMPORT_STRICT_FLOAT, strict=True),
+        StrictTypes.bytes: data_type.from_import(IMPORT_STRICT_BYTES, strict=True),
+        StrictTypes.bool: data_type.from_import(IMPORT_STRICT_BOOL, strict=True),
+        StrictTypes.str: data_type.from_import(IMPORT_STRICT_STR, strict=True),
+    }
+
+
+number_kwargs: set[str] = {
+    "exclusiveMinimum",
+    "minimum",
+    "exclusiveMaximum",
+    "maximum",
+    "multipleOf",
+}
+
+string_kwargs: set[str] = {"minItems", "maxItems", "minLength", "maxLength", "pattern"}
+
+bytes_kwargs: set[str] = {"minLength", "maxLength"}
+
+escape_characters = str.maketrans({
+    "'": r"\'",
+    "\b": r"\b",
+    "\f": r"\f",
+    "\n": r"\n",
+    "\r": r"\r",
+    "\t": r"\t",
+})
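+# e.g. (illustrative) the table escapes a single quote to \' and a newline to the
+# two characters \n, so patterns embed safely in the r'...' literal built in
+# get_data_str_type below.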
+
+
+class DataTypeManager(_DataTypeManager):
+    """Manage data type mappings for Pydantic v1 models."""
+
+    PATTERN_KEY: ClassVar[str] = "regex"
+
+    def __init__(  # noqa: PLR0913, PLR0917
+        self,
+        python_version: PythonVersion = PythonVersionMin,
+        use_standard_collections: bool = False,  # noqa: FBT001, FBT002
+        use_generic_container_types: bool = False,  # noqa: FBT001, FBT002
+        strict_types: Sequence[StrictTypes] | None = None,
+        use_non_positive_negative_number_constrained_types: bool = False,  # noqa: FBT001, FBT002
+        use_decimal_for_multiple_of: bool = False,  # noqa: FBT001, FBT002
+        use_union_operator: bool = False,  # noqa: FBT001, FBT002
+        use_pendulum: bool = False,  # noqa: FBT001, FBT002
+        target_datetime_class: DatetimeClassType | None = None,
+        treat_dot_as_module: bool = False,  # noqa: FBT001, FBT002
+        use_serialize_as_any: bool = False,  # noqa: FBT001, FBT002
+    ) -> None:
+        """Initialize the DataTypeManager with Pydantic v1 type mappings."""
+        super().__init__(
+            python_version,
+            use_standard_collections,
+            use_generic_container_types,
+            strict_types,
+            use_non_positive_negative_number_constrained_types,
+            use_decimal_for_multiple_of,
+            use_union_operator,
+            use_pendulum,
+            target_datetime_class,
+            treat_dot_as_module,
+            use_serialize_as_any,
+        )
+
+        self.type_map: dict[Types, DataType] = self.type_map_factory(
+            self.data_type,
+            strict_types=self.strict_types,
+            pattern_key=self.PATTERN_KEY,
+            target_datetime_class=self.target_datetime_class,
+        )
+        self.strict_type_map: dict[StrictTypes, DataType] = strict_type_map_factory(
+            self.data_type,
+        )
+
+        self.kwargs_schema_to_model: dict[str, str] = {
+            "exclusiveMinimum": "gt",
+            "minimum": "ge",
+            "exclusiveMaximum": "lt",
+            "maximum": "le",
+            "multipleOf": "multiple_of",
+            "minItems": "min_items",
+            "maxItems": "max_items",
+            "minLength": "min_length",
+            "maxLength": "max_length",
+            "pattern": self.PATTERN_KEY,
+        }
+
+    def type_map_factory(
+        self,
+        data_type: type[DataType],
+        strict_types: Sequence[StrictTypes],
+        pattern_key: str,
+        target_datetime_class: DatetimeClassType | None,  # noqa: ARG002
+    ) -> dict[Types, DataType]:
+        """Create type mapping with Pydantic v1 specific types."""
+        return type_map_factory(
+            data_type,
+            strict_types,
+            pattern_key,
+            self.use_pendulum,
+        )
+
+    def transform_kwargs(self, kwargs: dict[str, Any], filter_: set[str]) -> dict[str, str]:
+        """Transform schema kwargs to Pydantic v1 field kwargs."""
+        return {self.kwargs_schema_to_model.get(k, k): v for (k, v) in kwargs.items() if v is not None and k in filter_}
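+    # e.g. (illustrative) transform_kwargs({'minLength': 1, 'format': 'email'}, string_kwargs)
+    # returns {'min_length': 1}: keys are renamed via kwargs_schema_to_model and
+    # anything outside the filter set is dropped.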
+
+    def get_data_int_type(  # noqa: PLR0911
+        self,
+        types: Types,
+        **kwargs: Any,
+    ) -> DataType:
+        """Get int data type with constraints (conint, PositiveInt, etc.)."""
+        data_type_kwargs: dict[str, Any] = self.transform_kwargs(kwargs, number_kwargs)
+        strict = StrictTypes.int in self.strict_types
+        if data_type_kwargs:
+            if not strict:
+                if data_type_kwargs == {"gt": 0}:
+                    return self.data_type.from_import(IMPORT_POSITIVE_INT)
+                if data_type_kwargs == {"lt": 0}:
+                    return self.data_type.from_import(IMPORT_NEGATIVE_INT)
+                if data_type_kwargs == {"ge": 0} and self.use_non_positive_negative_number_constrained_types:
+                    return self.data_type.from_import(IMPORT_NON_NEGATIVE_INT)
+                if data_type_kwargs == {"le": 0} and self.use_non_positive_negative_number_constrained_types:
+                    return self.data_type.from_import(IMPORT_NON_POSITIVE_INT)
+            kwargs = {k: int(v) for k, v in data_type_kwargs.items()}
+            if strict:
+                kwargs["strict"] = True
+            return self.data_type.from_import(IMPORT_CONINT, kwargs=kwargs)
+        if strict:
+            return self.strict_type_map[StrictTypes.int]
+        return self.type_map[types]
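+    # Illustrative mapping: exclusiveMinimum=0 alone yields PositiveInt; minimum=1
+    # yields conint(ge=1); with strict ints, conint(ge=1, strict=True), or
+    # StrictInt when no numeric constraints are present.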
+
+    def get_data_float_type(  # noqa: PLR0911
+        self,
+        types: Types,
+        **kwargs: Any,
+    ) -> DataType:
+        """Get float data type with constraints (confloat, PositiveFloat, etc.)."""
+        data_type_kwargs = self.transform_kwargs(kwargs, number_kwargs)
+        strict = StrictTypes.float in self.strict_types
+        if data_type_kwargs:
+            # Use Decimal instead of float when multipleOf is present to avoid floating-point precision issues
+            if self.use_decimal_for_multiple_of and "multiple_of" in data_type_kwargs:
+                return self.data_type.from_import(
+                    IMPORT_CONDECIMAL,
+                    kwargs={k: Decimal(str(v)) for k, v in data_type_kwargs.items()},
+                )
+            if not strict:
+                if data_type_kwargs == {"gt": 0}:
+                    return self.data_type.from_import(IMPORT_POSITIVE_FLOAT)
+                if data_type_kwargs == {"lt": 0}:
+                    return self.data_type.from_import(IMPORT_NEGATIVE_FLOAT)
+                if data_type_kwargs == {"ge": 0} and self.use_non_positive_negative_number_constrained_types:
+                    return self.data_type.from_import(IMPORT_NON_NEGATIVE_FLOAT)
+                if data_type_kwargs == {"le": 0} and self.use_non_positive_negative_number_constrained_types:
+                    return self.data_type.from_import(IMPORT_NON_POSITIVE_FLOAT)
+            kwargs = {k: float(v) for k, v in data_type_kwargs.items()}
+            if strict:
+                kwargs["strict"] = True
+            return self.data_type.from_import(IMPORT_CONFLOAT, kwargs=kwargs)
+        if strict:
+            return self.strict_type_map[StrictTypes.float]
+        return self.type_map[types]
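+    # Illustrative: multipleOf=0.01 with use_decimal_for_multiple_of yields
+    # condecimal(multiple_of=Decimal('0.01')) to avoid float rounding; otherwise
+    # confloat(multiple_of=0.01) is generated.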
+
+    def get_data_decimal_type(self, types: Types, **kwargs: Any) -> DataType:
+        """Get decimal data type with constraints (condecimal)."""
+        data_type_kwargs = self.transform_kwargs(kwargs, number_kwargs)
+        if data_type_kwargs:
+            return self.data_type.from_import(
+                IMPORT_CONDECIMAL,
+                kwargs={k: Decimal(str(v) if isinstance(v, UnionIntFloat) else v) for k, v in data_type_kwargs.items()},
+            )
+        return self.type_map[types]
+
+    def get_data_str_type(self, types: Types, **kwargs: Any) -> DataType:
+        """Get string data type with constraints (constr)."""
+        data_type_kwargs: dict[str, Any] = self.transform_kwargs(kwargs, string_kwargs)
+        strict = StrictTypes.str in self.strict_types
+        if data_type_kwargs:
+            if strict:
+                data_type_kwargs["strict"] = True
+            if self.PATTERN_KEY in data_type_kwargs:
+                escaped_regex = data_type_kwargs[self.PATTERN_KEY].translate(escape_characters)
+                # TODO: remove unneeded escaped characters
+                data_type_kwargs[self.PATTERN_KEY] = f"r'{escaped_regex}'"
+            return self.data_type.from_import(IMPORT_CONSTR, kwargs=data_type_kwargs)
+        if strict:
+            return self.strict_type_map[StrictTypes.str]
+        return self.type_map[types]
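+    # Illustrative: minLength=1 plus pattern '^a$' yields
+    # constr(min_length=1, regex=r'^a$') under Pydantic v1 (PATTERN_KEY is 'regex').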
+
+    def get_data_bytes_type(self, types: Types, **kwargs: Any) -> DataType:
+        """Get bytes data type with constraints (conbytes)."""
+        data_type_kwargs: dict[str, Any] = self.transform_kwargs(kwargs, bytes_kwargs)
+        strict = StrictTypes.bytes in self.strict_types
+        if data_type_kwargs and not strict:
+            return self.data_type.from_import(IMPORT_CONBYTES, kwargs=data_type_kwargs)
+        # conbytes doesn't accept strict argument
+        # https://github.com/samuelcolvin/pydantic/issues/2489
+        if strict:
+            return self.strict_type_map[StrictTypes.bytes]
+        return self.type_map[types]
+
+    def get_data_type(  # noqa: PLR0911
+        self,
+        types: Types,
+        **kwargs: Any,
+    ) -> DataType:
+        """Get data type with appropriate constraints for the given type."""
+        if types == Types.string:
+            return self.get_data_str_type(types, **kwargs)
+        if types in {Types.int32, Types.int64, Types.integer}:
+            return self.get_data_int_type(types, **kwargs)
+        if types in {Types.float, Types.double, Types.number, Types.time}:
+            return self.get_data_float_type(types, **kwargs)
+        if types == Types.decimal:
+            return self.get_data_decimal_type(types, **kwargs)
+        if types == Types.binary:
+            return self.get_data_bytes_type(types, **kwargs)
+        if types == Types.boolean and StrictTypes.bool in self.strict_types:
+            return self.strict_type_map[StrictTypes.bool]
+
+        return self.type_map[types]
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/pydantic_v2/__init__.py 0.45.0-1/src/datamodel_code_generator/model/pydantic_v2/__init__.py
--- 0.26.4-3/src/datamodel_code_generator/model/pydantic_v2/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/pydantic_v2/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,51 @@
+"""Pydantic v2 model generator.
+
+Provides BaseModel, RootModel, and DataModelField for generating
+Pydantic v2 compatible data models with ConfigDict support.
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Optional
+
+from pydantic import BaseModel as _BaseModel
+
+from .base_model import BaseModel, DataModelField, UnionMode
+from .root_model import RootModel
+from .types import DataTypeManager
+
+if TYPE_CHECKING:
+    from collections.abc import Iterable
+
+
+def dump_resolve_reference_action(class_names: Iterable[str]) -> str:
+    """Generate model_rebuild() calls for Pydantic v2 models."""
+    return "\n".join(f"{class_name}.model_rebuild()" for class_name in class_names)
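+# Illustrative only: dump_resolve_reference_action(["Pet", "Owner"]) returns
+# 'Pet.model_rebuild()\nOwner.model_rebuild()', the Pydantic v2 replacement for
+# update_forward_refs().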
+
+
+class ConfigDict(_BaseModel):
+    """Pydantic v2 model_config options."""
+
+    extra: Optional[str] = None  # noqa: UP045
+    title: Optional[str] = None  # noqa: UP045
+    populate_by_name: Optional[bool] = None  # noqa: UP045
+    allow_extra_fields: Optional[bool] = None  # noqa: UP045
+    extra_fields: Optional[str] = None  # noqa: UP045
+    from_attributes: Optional[bool] = None  # noqa: UP045
+    frozen: Optional[bool] = None  # noqa: UP045
+    arbitrary_types_allowed: Optional[bool] = None  # noqa: UP045
+    protected_namespaces: Optional[tuple[str, ...]] = None  # noqa: UP045
+    regex_engine: Optional[str] = None  # noqa: UP045
+    use_enum_values: Optional[bool] = None  # noqa: UP045
+    coerce_numbers_to_str: Optional[bool] = None  # noqa: UP045
+    use_attribute_docstrings: Optional[bool] = None  # noqa: UP045
+
+
+__all__ = [
+    "BaseModel",
+    "DataModelField",
+    "DataTypeManager",
+    "RootModel",
+    "UnionMode",
+    "dump_resolve_reference_action",
+]
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/pydantic_v2/base_model.py 0.45.0-1/src/datamodel_code_generator/model/pydantic_v2/base_model.py
--- 0.26.4-3/src/datamodel_code_generator/model/pydantic_v2/base_model.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/pydantic_v2/base_model.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,268 @@
+"""Pydantic v2 BaseModel implementation.
+
+Provides Constraints, DataModelField, and BaseModel for Pydantic v2
+with support for Field() constraints and ConfigDict.
+"""
+
+from __future__ import annotations
+
+import re
+from enum import Enum
+from typing import TYPE_CHECKING, Any, ClassVar, NamedTuple, Optional
+
+from pydantic import Field
+from typing_extensions import Literal
+
+from datamodel_code_generator.model.base import UNDEFINED, DataModelFieldBase
+from datamodel_code_generator.model.pydantic.base_model import (
+    BaseModelBase,
+)
+from datamodel_code_generator.model.pydantic.base_model import (
+    Constraints as _Constraints,
+)
+from datamodel_code_generator.model.pydantic.base_model import (
+    DataModelField as DataModelFieldV1,
+)
+from datamodel_code_generator.model.pydantic_v2.imports import IMPORT_CONFIG_DICT
+from datamodel_code_generator.util import field_validator, model_validator
+
+if TYPE_CHECKING:
+    from collections import defaultdict
+    from pathlib import Path
+
+    from datamodel_code_generator.reference import Reference
+
+
+class UnionMode(Enum):
+    """Union validation mode for Pydantic v2 (smart or left_to_right)."""
+
+    smart = "smart"
+    left_to_right = "left_to_right"
+
+
+class Constraints(_Constraints):
+    """Pydantic v2 field constraints with pattern support."""
+
+    # To override existing pattern alias
+    regex: Optional[str] = Field(None, alias="regex")  # noqa: UP045
+    pattern: Optional[str] = Field(None, alias="pattern")  # noqa: UP045
+
+    @model_validator(mode="before")
+    def validate_min_max_items(cls, values: Any) -> dict[str, Any]:  # noqa: N805
+        """Validate and convert minItems/maxItems to minLength/maxLength."""
+        if not isinstance(values, dict):  # pragma: no cover
+            return values
+        min_items = values.pop("minItems", None)
+        if min_items is not None:
+            values["minLength"] = min_items
+        max_items = values.pop("maxItems", None)
+        if max_items is not None:
+            values["maxLength"] = max_items
+        return values
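+    # e.g. (illustrative) {'minItems': 1, 'maxItems': 3} becomes
+    # {'minLength': 1, 'maxLength': 3}, matching Pydantic v2's renamed constraints.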
+
+
+class DataModelField(DataModelFieldV1):
+    """Pydantic v2 field with Field() constraints and json_schema_extra support."""
+
+    _EXCLUDE_FIELD_KEYS: ClassVar[set[str]] = {
+        "alias",
+        "default",
+        "gt",
+        "ge",
+        "lt",
+        "le",
+        "multiple_of",
+        "min_length",
+        "max_length",
+        "pattern",
+    }
+    _DEFAULT_FIELD_KEYS: ClassVar[set[str]] = {
+        "default",
+        "default_factory",
+        "alias",
+        "alias_priority",
+        "validation_alias",
+        "serialization_alias",
+        "title",
+        "description",
+        "examples",
+        "exclude",
+        "discriminator",
+        "json_schema_extra",
+        "frozen",
+        "validate_default",
+        "repr",
+        "init_var",
+        "kw_only",
+        "pattern",
+        "strict",
+        "gt",
+        "ge",
+        "lt",
+        "le",
+        "multiple_of",
+        "allow_inf_nan",
+        "max_digits",
+        "decimal_places",
+        "min_length",
+        "max_length",
+        "union_mode",
+    }
+    constraints: Optional[Constraints] = None  # pyright: ignore[reportIncompatibleVariableOverride]  # noqa: UP045
+    _PARSE_METHOD: ClassVar[str] = "model_validate"
+    can_have_extra_keys: ClassVar[bool] = False
+
+    @field_validator("extras")
+    def validate_extras(cls, values: Any) -> dict[str, Any]:  # noqa: N805
+        """Validate and convert example to examples list."""
+        if not isinstance(values, dict):  # pragma: no cover
+            return values
+        if "examples" in values:
+            return values
+
+        if "example" in values:
+            values["examples"] = [values.pop("example")]
+        return values
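+    # e.g. (illustrative) {'example': 1} is normalized to {'examples': [1]};
+    # an existing 'examples' key is left untouched.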
+
+    def process_const(self) -> None:
+        """Process const field constraint using literal type."""
+        self._process_const_as_literal()
+
+    def _process_data_in_str(self, data: dict[str, Any]) -> None:
+        if self.const:
+            # const is removed in pydantic 2.0
+            data.pop("const")
+
+        # unique_items is not supported in pydantic 2.0
+        data.pop("unique_items", None)
+
+        if self.use_frozen_field and self.read_only:
+            data["frozen"] = True
+
+        if "union_mode" in data:
+            if self.data_type.is_union:
+                data["union_mode"] = data.pop("union_mode").value
+            else:
+                data.pop("union_mode")
+
+        # **extra is not supported in pydantic 2.0
+        json_schema_extra = {k: v for k, v in data.items() if k not in self._DEFAULT_FIELD_KEYS}
+        if json_schema_extra:
+            data["json_schema_extra"] = json_schema_extra
+            for key in json_schema_extra:
+                data.pop(key)
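+        # Illustrative: an extra key such as {'x-internal': True} is moved into
+        # json_schema_extra={'x-internal': True}, since Field() in Pydantic v2 no
+        # longer accepts arbitrary **extra keyword arguments.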
+
+    def _process_annotated_field_arguments(  # noqa: PLR6301
+        self,
+        field_arguments: list[str],
+    ) -> list[str]:
+        return field_arguments
+
+
+class ConfigAttribute(NamedTuple):
+    """Configuration attribute mapping for ConfigDict conversion."""
+
+    from_: str
+    to: str
+    invert: bool
+
+
+class BaseModel(BaseModelBase):
+    """Pydantic v2 BaseModel with ConfigDict generation and automatic regex_engine selection."""
+
+    TEMPLATE_FILE_PATH: ClassVar[str] = "pydantic_v2/BaseModel.jinja2"
+    BASE_CLASS: ClassVar[str] = "pydantic.BaseModel"
+    CONFIG_ATTRIBUTES: ClassVar[list[ConfigAttribute]] = [
+        ConfigAttribute("allow_population_by_field_name", "populate_by_name", False),  # noqa: FBT003
+        ConfigAttribute("populate_by_name", "populate_by_name", False),  # noqa: FBT003
+        ConfigAttribute("allow_mutation", "frozen", True),  # noqa: FBT003
+        ConfigAttribute("frozen", "frozen", False),  # noqa: FBT003
+        ConfigAttribute("use_attribute_docstrings", "use_attribute_docstrings", False),  # noqa: FBT003
+    ]
+
+    def __init__(  # noqa: PLR0913
+        self,
+        *,
+        reference: Reference,
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, Any] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
+        default: Any = UNDEFINED,
+        nullable: bool = False,
+        keyword_only: bool = False,
+        treat_dot_as_module: bool = False,
+    ) -> None:
+        """Initialize BaseModel with ConfigDict generation from template data."""
+        super().__init__(
+            reference=reference,
+            fields=fields,
+            decorators=decorators,
+            base_classes=base_classes,
+            custom_base_class=custom_base_class,
+            custom_template_dir=custom_template_dir,
+            extra_template_data=extra_template_data,
+            path=path,
+            description=description,
+            default=default,
+            nullable=nullable,
+            keyword_only=keyword_only,
+            treat_dot_as_module=treat_dot_as_module,
+        )
+        config_parameters: dict[str, Any] = {}
+
+        extra = self._get_config_extra()
+        if extra:
+            config_parameters["extra"] = extra
+
+        for from_, to, invert in self.CONFIG_ATTRIBUTES:
+            if from_ in self.extra_template_data:
+                config_parameters[to] = (
+                    not self.extra_template_data[from_] if invert else self.extra_template_data[from_]
+                )
+        for data_type in self.all_data_types:
+            if data_type.is_custom_type:  # pragma: no cover
+                config_parameters["arbitrary_types_allowed"] = True
+                break
+
+        for field in self.fields:
+            # Check if a regex pattern uses lookarounds.
+            # Depending on the generation configuration, the pattern may end up in two different places.
+            pattern = (isinstance(field.constraints, Constraints) and field.constraints.pattern) or (
+                field.data_type.kwargs or {}
+            ).get("pattern")
+            if pattern and re.search(r"\(\?<?[=!]", pattern):
+                config_parameters["regex_engine"] = '"python-re"'
+                break
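+        # Illustrative: a pattern like (?=.*[A-Z]).* contains a lookahead, which
+        # Pydantic v2's default Rust regex engine rejects, so regex_engine is
+        # pinned to 'python-re'.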
+
+        if isinstance(self.extra_template_data.get("config"), dict):
+            for key, value in self.extra_template_data["config"].items():
+                config_parameters[key] = value  # noqa: PERF403
+
+        if config_parameters:
+            from datamodel_code_generator.model.pydantic_v2 import ConfigDict  # noqa: PLC0415
+
+            self.extra_template_data["config"] = ConfigDict.parse_obj(config_parameters)  # pyright: ignore[reportArgumentType]
+            self._additional_imports.append(IMPORT_CONFIG_DICT)
+
+    def _get_config_extra(self) -> Literal["'allow'", "'forbid'", "'ignore'"] | None:
+        additional_properties = self.extra_template_data.get("additionalProperties")
+        allow_extra_fields = self.extra_template_data.get("allow_extra_fields")
+        extra_fields = self.extra_template_data.get("extra_fields")
+
+        config_extra = None
+        if allow_extra_fields or extra_fields == "allow":
+            config_extra = "'allow'"
+        elif extra_fields == "forbid":
+            config_extra = "'forbid'"
+        elif extra_fields == "ignore":
+            config_extra = "'ignore'"
+        elif additional_properties is True:
+            config_extra = "'allow'"
+        elif additional_properties is False:
+            config_extra = "'forbid'"
+        return config_extra
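+    # Illustrative precedence: allow_extra_fields or extra_fields='allow' wins,
+    # then explicit extra_fields values, then additionalProperties (True ->
+    # 'allow', False -> 'forbid'); otherwise None keeps Pydantic's default.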
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/pydantic_v2/imports.py 0.45.0-1/src/datamodel_code_generator/model/pydantic_v2/imports.py
--- 0.26.4-3/src/datamodel_code_generator/model/pydantic_v2/imports.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/pydantic_v2/imports.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+"""Import definitions for Pydantic v2 types.
+
+Provides pre-defined Import objects for Pydantic v2 types (ConfigDict, AwareDatetime, etc.).
+"""
+
+from __future__ import annotations
+
+from datamodel_code_generator.imports import Import
+
+IMPORT_CONFIG_DICT = Import.from_full_path("pydantic.ConfigDict")
+IMPORT_AWARE_DATETIME = Import.from_full_path("pydantic.AwareDatetime")
+IMPORT_NAIVE_DATETIME = Import.from_full_path("pydantic.NaiveDatetime")
+# IMPORT_BASE64STR is used for OpenAPI strings with format "byte" (base64 encoded characters).
+IMPORT_BASE64STR = Import.from_full_path("pydantic.Base64Str")
+IMPORT_SERIALIZE_AS_ANY = Import.from_full_path("pydantic.SerializeAsAny")
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/pydantic_v2/root_model.py 0.45.0-1/src/datamodel_code_generator/model/pydantic_v2/root_model.py
--- 0.26.4-3/src/datamodel_code_generator/model/pydantic_v2/root_model.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/pydantic_v2/root_model.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,35 @@
+"""Pydantic v2 RootModel implementation.
+
+Generates models inheriting from pydantic.RootModel for wrapping single types.
+"""
+
+from __future__ import annotations
+
+from typing import Any, ClassVar, Literal
+
+from datamodel_code_generator.model.pydantic_v2.base_model import BaseModel
+
+
+class RootModel(BaseModel):
+    """DataModel for Pydantic v2 RootModel."""
+
+    TEMPLATE_FILE_PATH: ClassVar[str] = "pydantic_v2/RootModel.jinja2"
+    BASE_CLASS: ClassVar[str] = "pydantic.RootModel"
+
+    def __init__(
+        self,
+        **kwargs: Any,
+    ) -> None:
+        """Initialize RootModel and remove custom_base_class if present.
+
+        custom_base_class is dropped for Pydantic v2 models because, unlike in Pydantic v1,
+        a custom base class cannot implement both BaseModel and RootModel, so the generated
+        class would not behave as a root model.
+        """
+        if "custom_base_class" in kwargs:
+            kwargs.pop("custom_base_class")
+
+        super().__init__(**kwargs)
+
+    def _get_config_extra(self) -> Literal["'allow'", "'forbid'"] | None:  # noqa: PLR6301
+        # PydanticV2 RootModels cannot have extra fields
+        return None
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/pydantic_v2/types.py 0.45.0-1/src/datamodel_code_generator/model/pydantic_v2/types.py
--- 0.26.4-3/src/datamodel_code_generator/model/pydantic_v2/types.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/pydantic_v2/types.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,143 @@
+"""Pydantic v2 type manager.
+
+Maps schema types to Pydantic v2 specific types with AwareDatetime, NaiveDatetime, etc.
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, ClassVar
+
+from datamodel_code_generator.format import DatetimeClassType
+from datamodel_code_generator.model.pydantic import DataTypeManager as _DataTypeManager
+from datamodel_code_generator.model.pydantic.imports import IMPORT_CONSTR
+from datamodel_code_generator.model.pydantic_v2.imports import (
+    IMPORT_AWARE_DATETIME,
+    IMPORT_BASE64STR,
+    IMPORT_NAIVE_DATETIME,
+    IMPORT_SERIALIZE_AS_ANY,
+)
+from datamodel_code_generator.types import (
+    DataType,
+    PythonVersion,
+    PythonVersionMin,
+    StrictTypes,
+    Types,
+)
+
+if TYPE_CHECKING:
+    from collections.abc import Iterator, Sequence
+
+    from datamodel_code_generator.imports import Import
+
+
+class PydanticV2DataType(DataType):
+    """Pydantic v2-specific DataType with SerializeAsAny support."""
+
+    def _should_wrap_with_serialize_as_any(self) -> bool:
+        if not self.use_serialize_as_any:
+            return False
+
+        assert self.reference is not None
+
+        from datamodel_code_generator.model.base import DataModel  # noqa: PLC0415
+
+        return any(isinstance(child, DataModel) and child.fields for child in self.reference.children)
+
+    def _get_wrapped_reference_type_hint(self, type_: str) -> str:
+        if self._should_wrap_with_serialize_as_any():
+            return f"SerializeAsAny[{type_}]"
+
+        return type_
+
+    @property
+    def imports(self) -> Iterator[Import]:
+        """Yield imports including SerializeAsAny when needed."""
+        yield from super().imports
+
+        if "SerializeAsAny" in self.type_hint:
+            yield IMPORT_SERIALIZE_AS_ANY
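+    # Illustrative: a reference to a model with fields renders as
+    # SerializeAsAny[Pet] when use_serialize_as_any is enabled, and the
+    # pydantic.SerializeAsAny import is emitted automatically.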
+
+
+class DataTypeManager(_DataTypeManager):
+    """Type manager for Pydantic v2 with pattern key support."""
+
+    PATTERN_KEY: ClassVar[str] = "pattern"
+
+    def __init__(  # noqa: PLR0913, PLR0917
+        self,
+        python_version: PythonVersion = PythonVersionMin,
+        use_standard_collections: bool = False,  # noqa: FBT001, FBT002
+        use_generic_container_types: bool = False,  # noqa: FBT001, FBT002
+        strict_types: Sequence[StrictTypes] | None = None,
+        use_non_positive_negative_number_constrained_types: bool = False,  # noqa: FBT001, FBT002
+        use_decimal_for_multiple_of: bool = False,  # noqa: FBT001, FBT002
+        use_union_operator: bool = False,  # noqa: FBT001, FBT002
+        use_pendulum: bool = False,  # noqa: FBT001, FBT002
+        target_datetime_class: DatetimeClassType | None = None,
+        treat_dot_as_module: bool = False,  # noqa: FBT001, FBT002
+        use_serialize_as_any: bool = False,  # noqa: FBT001, FBT002
+    ) -> None:
+        """Initialize with pydantic v2-specific DataType."""
+        super().__init__(
+            python_version=python_version,
+            use_standard_collections=use_standard_collections,
+            use_generic_container_types=use_generic_container_types,
+            strict_types=strict_types,
+            use_non_positive_negative_number_constrained_types=use_non_positive_negative_number_constrained_types,
+            use_decimal_for_multiple_of=use_decimal_for_multiple_of,
+            use_union_operator=use_union_operator,
+            use_pendulum=use_pendulum,
+            target_datetime_class=target_datetime_class,
+            treat_dot_as_module=treat_dot_as_module,
+            use_serialize_as_any=use_serialize_as_any,
+        )
+
+        # Override the data_type with our pydantic v2 version
+        from pydantic import create_model  # noqa: PLC0415
+
+        self.data_type: type[DataType] = create_model(
+            "PydanticV2ContextDataType",
+            python_version=(PythonVersion, python_version),
+            use_standard_collections=(bool, use_standard_collections),
+            use_generic_container=(bool, use_generic_container_types),
+            use_union_operator=(bool, use_union_operator),
+            treat_dot_as_module=(bool, treat_dot_as_module),
+            use_serialize_as_any=(bool, use_serialize_as_any),
+            __base__=PydanticV2DataType,
+        )
+
+    def type_map_factory(
+        self,
+        data_type: type[DataType],
+        strict_types: Sequence[StrictTypes],
+        pattern_key: str,
+        target_datetime_class: DatetimeClassType | None = None,
+    ) -> dict[Types, DataType]:
+        """Create type mapping with Pydantic v2 specific types and datetime classes."""
+        result = {
+            **super().type_map_factory(
+                data_type,
+                strict_types,
+                pattern_key,
+                target_datetime_class or DatetimeClassType.Datetime,
+            ),
+            Types.hostname: self.data_type.from_import(
+                IMPORT_CONSTR,
+                strict=StrictTypes.str in strict_types,
+                # https://github.com/horejsek/python-fastjsonschema/blob/61c6997a8348b8df9b22e029ca2ba35ef441fbb8/fastjsonschema/draft04.py#L31
+                kwargs={
+                    pattern_key: r"r'^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])\.)*"
+                    r"([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]{0,61}[A-Za-z0-9])$'",
+                    **({"strict": True} if StrictTypes.str in strict_types else {}),
+                },
+            ),
+            Types.byte: self.data_type.from_import(
+                IMPORT_BASE64STR,
+                strict=StrictTypes.str in strict_types,
+            ),
+        }
+        if target_datetime_class == DatetimeClassType.Awaredatetime:
+            result[Types.date_time] = data_type.from_import(IMPORT_AWARE_DATETIME)
+        elif target_datetime_class == DatetimeClassType.Naivedatetime:
+            result[Types.date_time] = data_type.from_import(IMPORT_NAIVE_DATETIME)
+        return result
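
A note on the SerializeAsAny wrapping above: in pydantic v2, serialization follows
the declared annotation, so a field typed as a base model silently drops
subclass-only data unless the annotation is wrapped. A minimal sketch using only
public pydantic v2 API (the model names are invented for illustration):

    from pydantic import BaseModel, SerializeAsAny

    class Pet(BaseModel):
        name: str

    class Dog(Pet):
        breed: str

    class OwnerPlain(BaseModel):
        pet: Pet                      # dumps per the annotation: Dog.breed is lost

    class OwnerAny(BaseModel):
        pet: SerializeAsAny[Pet]      # dumps per the runtime type

    dog = Dog(name="Rex", breed="lab")
    print(OwnerPlain(pet=dog).model_dump())  # {'pet': {'name': 'Rex'}}
    print(OwnerAny(pet=dog).model_dump())    # {'pet': {'name': 'Rex', 'breed': 'lab'}}
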
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/scalar.py 0.45.0-1/src/datamodel_code_generator/model/scalar.py
--- 0.26.4-3/src/datamodel_code_generator/model/scalar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/scalar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,124 @@
+"""Scalar type model generator.
+
+Generates type aliases for GraphQL scalar types.
+"""
+
+from __future__ import annotations
+
+from collections import defaultdict
+from typing import TYPE_CHECKING, Any, ClassVar
+
+from datamodel_code_generator.imports import (
+    IMPORT_TYPE_ALIAS,
+    IMPORT_TYPE_ALIAS_BACKPORT,
+    IMPORT_TYPE_ALIAS_TYPE,
+    Import,
+)
+from datamodel_code_generator.model import DataModel, DataModelFieldBase
+from datamodel_code_generator.model.base import UNDEFINED
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+    from datamodel_code_generator.reference import Reference
+
+_INT: str = "int"
+_FLOAT: str = "float"
+_BOOLEAN: str = "bool"
+_STR: str = "str"
+
+# Default GraphQL scalar type mappings (unknown custom scalars fall back to str).
+DEFAULT_GRAPHQL_SCALAR_TYPE = _STR
+
+DEFAULT_GRAPHQL_SCALAR_TYPES: dict[str, str] = {
+    "Boolean": _BOOLEAN,
+    "String": _STR,
+    "ID": _STR,
+    "Int": _INT,
+    "Float": _FLOAT,
+}
+
+
+class _DataTypeScalarBase(DataModel):
+    """Base class for GraphQL scalar types with shared __init__ logic."""
+
+    def __init__(  # noqa: PLR0913
+        self,
+        *,
+        reference: Reference,
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
+        methods: list[str] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
+        default: Any = UNDEFINED,
+        nullable: bool = False,
+        keyword_only: bool = False,
+        treat_dot_as_module: bool = False,
+    ) -> None:
+        """Initialize GraphQL scalar type with Python type mapping."""
+        extra_template_data = extra_template_data or defaultdict(dict)
+
+        scalar_name = reference.name
+        if scalar_name not in extra_template_data:
+            extra_template_data[scalar_name] = defaultdict(dict)
+
+        # py_type
+        py_type = extra_template_data[scalar_name].get(
+            "py_type",
+            DEFAULT_GRAPHQL_SCALAR_TYPES.get(reference.name, DEFAULT_GRAPHQL_SCALAR_TYPE),
+        )
+        extra_template_data[scalar_name]["py_type"] = py_type
+
+        super().__init__(
+            reference=reference,
+            fields=fields,
+            decorators=decorators,
+            base_classes=base_classes,
+            custom_base_class=custom_base_class,
+            custom_template_dir=custom_template_dir,
+            extra_template_data=extra_template_data,
+            methods=methods,
+            path=path,
+            description=description,
+            default=default,
+            nullable=nullable,
+            keyword_only=keyword_only,
+            treat_dot_as_module=treat_dot_as_module,
+        )
+
+
+class DataTypeScalar(_DataTypeScalarBase):
+    """GraphQL scalar using TypeAlias annotation for Python 3.10+ (Name: TypeAlias = type)."""
+
+    TEMPLATE_FILE_PATH: ClassVar[str] = "ScalarTypeAliasAnnotation.jinja2"
+    BASE_CLASS: ClassVar[str] = ""
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_TYPE_ALIAS,)
+
+
+class DataTypeScalarBackport(_DataTypeScalarBase):
+    """GraphQL scalar using TypeAlias annotation for Python 3.9 (Name: TypeAlias = type)."""
+
+    TEMPLATE_FILE_PATH: ClassVar[str] = "ScalarTypeAliasAnnotation.jinja2"
+    BASE_CLASS: ClassVar[str] = ""
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_TYPE_ALIAS_BACKPORT,)
+
+
+class DataTypeScalarTypeBackport(_DataTypeScalarBase):
+    """GraphQL scalar using TypeAliasType for Python 3.9-3.11 (Name = TypeAliasType("Name", type))."""
+
+    TEMPLATE_FILE_PATH: ClassVar[str] = "ScalarTypeAliasType.jinja2"
+    BASE_CLASS: ClassVar[str] = ""
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_TYPE_ALIAS_TYPE,)
+
+
+class DataTypeScalarTypeStatement(_DataTypeScalarBase):
+    """GraphQL scalar using type statement for Python 3.12+ (type Name = type)."""
+
+    TEMPLATE_FILE_PATH: ClassVar[str] = "ScalarTypeStatement.jinja2"
+    BASE_CLASS: ClassVar[str] = ""
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = ()
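
The py_type resolution in _DataTypeScalarBase.__init__ reduces to a dictionary
lookup with a str fallback; a minimal sketch of that behavior (the custom scalar
name is invented):

    defaults = {"Boolean": "bool", "String": "str", "ID": "str", "Int": "int", "Float": "float"}
    for scalar in ("Int", "ID", "DateTime"):        # DateTime: a custom scalar
        print(scalar, "->", defaults.get(scalar, "str"))
    # Int -> int
    # ID -> str
    # DateTime -> str    (unknown scalars fall back to str)
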
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/Enum.jinja2 0.45.0-1/src/datamodel_code_generator/model/template/Enum.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/Enum.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/template/Enum.jinja2	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+{% for decorator in decorators -%}
+{{ decorator }}
+{% endfor -%}
+class {{ class_name }}({{ base_class }}):
+{%- if description %}
+    """
+    {{ description | indent(4) }}
+    """
+{%- endif %}
+{%- for field in fields %}
+    {{ field.name }} = {{ field.default }}
+    {%- if field.docstring %}
+    """
+    {{ field.docstring | indent(4) }}
+    """
+{%- if field.use_inline_field_description and not loop.last %}
+
+{% endif %}
+    {%- elif field.inline_field_docstring %}
+    {{ field.inline_field_docstring }}
+{%- if not loop.last %}
+
+{% endif %}
+    {%- endif %}
+{%- endfor -%}
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/ScalarTypeAliasAnnotation.jinja2 0.45.0-1/src/datamodel_code_generator/model/template/ScalarTypeAliasAnnotation.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/ScalarTypeAliasAnnotation.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/template/ScalarTypeAliasAnnotation.jinja2	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,6 @@
+{{ class_name }}: TypeAlias = {{ py_type }}
+{%- if description %}
+"""
+{{ description }}
+"""
+{%- endif %}
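
To see what this template emits, here is a self-contained rendering of the same
template text with jinja2 (the scalar name and description are invented):

    from jinja2 import Template

    source = (
        '{{ class_name }}: TypeAlias = {{ py_type }}'
        '{%- if description %}\n"""\n{{ description }}\n"""{%- endif %}'
    )
    print(Template(source).render(
        class_name="DateTime", py_type="str", description="An ISO-8601 timestamp."
    ))
    # DateTime: TypeAlias = str
    # """
    # An ISO-8601 timestamp.
    # """
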
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/ScalarTypeAliasType.jinja2 0.45.0-1/src/datamodel_code_generator/model/template/ScalarTypeAliasType.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/ScalarTypeAliasType.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/template/ScalarTypeAliasType.jinja2	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,6 @@
+{{ class_name }} = TypeAliasType("{{ class_name }}", {{ py_type }})
+{%- if description %}
+"""
+{{ description }}
+"""
+{%- endif %}
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/ScalarTypeStatement.jinja2 0.45.0-1/src/datamodel_code_generator/model/template/ScalarTypeStatement.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/ScalarTypeStatement.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/template/ScalarTypeStatement.jinja2	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,6 @@
+type {{ class_name }} = {{ py_type }}
+{%- if description %}
+"""
+{{ description }}
+"""
+{%- endif %}
\ No newline at end of file
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/TypeAliasAnnotation.jinja2 0.45.0-1/src/datamodel_code_generator/model/template/TypeAliasAnnotation.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/TypeAliasAnnotation.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/template/TypeAliasAnnotation.jinja2	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+{%- macro get_type_annotation(_field) -%}
+{%- if _field.annotated -%}
+{{ _field.annotated }}
+{%- elif _field.field -%}
+Annotated[{{ _field.type_hint }}, {{ _field.field }}]
+{%- else -%}
+{{ _field.type_hint }}
+{%- endif -%}
+{%- endmacro -%}
+
+{{ class_name }}: TypeAlias = {{ get_type_annotation(fields[0]) }}{% if comment is defined %}  # {{ comment }}{% endif %}
+{%- if description %}
+"""
+{{ description | indent(0) }}
+"""
+{%- elif fields and fields[0].docstring %}
+"""
+{{ fields[0].docstring | indent(0) }}
+"""
+{%- endif %}
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/TypeAliasType.jinja2 0.45.0-1/src/datamodel_code_generator/model/template/TypeAliasType.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/TypeAliasType.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/template/TypeAliasType.jinja2	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+{%- macro get_type_annotation(_field) -%}
+{%- if _field.annotated -%}
+{{ _field.annotated }}
+{%- elif _field.field -%}
+Annotated[{{ _field.type_hint }}, {{ _field.field }}]
+{%- else -%}
+{{ _field.type_hint }}
+{%- endif -%}
+{%- endmacro -%}
+
+{{ class_name }} = TypeAliasType("{{ class_name }}", {{ get_type_annotation(fields[0]) }}){% if comment is defined %}  # {{ comment }}{% endif %}
+{%- if description %}
+"""
+{{ description | indent(0) }}
+"""
+{%- elif fields and fields[0].docstring %}
+"""
+{{ fields[0].docstring | indent(0) }}
+"""
+{%- endif %}
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/TypeStatement.jinja2 0.45.0-1/src/datamodel_code_generator/model/template/TypeStatement.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/TypeStatement.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/template/TypeStatement.jinja2	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+{%- macro get_type_annotation(_field) -%}
+{%- if _field.annotated -%}
+{{ _field.annotated }}
+{%- elif _field.field -%}
+Annotated[{{ _field.type_hint }}, {{ _field.field }}]
+{%- else -%}
+{{ _field.type_hint }}
+{%- endif -%}
+{%- endmacro -%}
+
+type {{ class_name }} = {{ get_type_annotation(fields[0]) }}{% if comment is defined %}  # {{ comment }}{% endif %}
+{%- if description %}
+"""
+{{ description | indent(0) }}
+"""
+{%- elif fields and fields[0].docstring %}
+"""
+{{ fields[0].docstring | indent(0) }}
+"""
+{%- endif %}
\ No newline at end of file
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/TypedDict.jinja2 0.45.0-1/src/datamodel_code_generator/model/template/TypedDict.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/TypedDict.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/template/TypedDict.jinja2	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,5 @@
+{%- if is_functional_syntax %}
+{% include 'TypedDictFunction.jinja2' %}
+{%- else %}
+{% include 'TypedDictClass.jinja2' %}
+{%- endif %}
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/TypedDictClass.jinja2 0.45.0-1/src/datamodel_code_generator/model/template/TypedDictClass.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/TypedDictClass.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/template/TypedDictClass.jinja2	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+class {{ class_name }}({{ base_class }}):
+{%- if description %}
+    """
+    {{ description | indent(4) }}
+    """
+{%- endif %}
+{%- if not fields and not description %}
+    pass
+{%- endif %}
+{%- for field in fields %}
+    {{ field.name }}: {{ field.type_hint }}
+    {%- if field.docstring %}
+    """
+    {{ field.docstring | indent(4) }}
+    """
+{%- if field.use_inline_field_description and not loop.last %}
+
+{% endif %}
+    {%- elif field.inline_field_docstring %}
+    {{ field.inline_field_docstring }}
+{%- if not loop.last %}
+
+{% endif %}
+    {%- endif %}
+{%- endfor -%}
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/TypedDictFunction.jinja2 0.45.0-1/src/datamodel_code_generator/model/template/TypedDictFunction.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/TypedDictFunction.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/template/TypedDictFunction.jinja2	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+{%- if description %}
+"""
+{{ description | indent(4) }}
+"""
+{%- endif %}
+{{ class_name }} = TypedDict('{{ class_name }}', {
+{%- for field in all_fields %}
+    '{{ field.key }}': {{ field.type_hint }},
+    {%- if field.docstring %}
+    """
+    {{ field.docstring | indent(4) }}
+    """
+{%- if field.use_inline_field_description and not loop.last %}
+
+{% endif %}
+    {%- elif field.inline_field_docstring %}
+    {{ field.inline_field_docstring }}
+{%- if not loop.last %}
+
+{% endif %}
+    {%- endif %}
+{%- endfor -%}
+})
+
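
The is_functional_syntax switch exists because TypedDict keys that are not valid
Python identifiers (or that are keywords) cannot be written as class attributes.
A short sketch of the two forms these templates correspond to (names invented):

    from typing import TypedDict

    class Point(TypedDict):      # class syntax: every key is a valid identifier
        x: int
        y: int

    # functional syntax: required for keys such as "release-year" or "class"
    Movie = TypedDict("Movie", {"title": str, "release-year": int})

    movie: Movie = {"title": "Alien", "release-year": 1979}
    print(movie["release-year"])   # 1979
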
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/UnionTypeAliasAnnotation.jinja2 0.45.0-1/src/datamodel_code_generator/model/template/UnionTypeAliasAnnotation.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/UnionTypeAliasAnnotation.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/template/UnionTypeAliasAnnotation.jinja2	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+{%- if description %}
+# {{ description | replace('\n', '\n# ') }}
+{%- endif %}
+{%- if fields|length > 1 %}
+{{ class_name }}: TypeAlias = Union[
+{%- for field in fields %}
+        '{{ field.name }}',
+{%- endfor %}
+    ]{% else %}
+{{ class_name }}: TypeAlias = {{ fields[0].name }}{% endif %}
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/UnionTypeAliasType.jinja2 0.45.0-1/src/datamodel_code_generator/model/template/UnionTypeAliasType.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/UnionTypeAliasType.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/template/UnionTypeAliasType.jinja2	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+{%- if description %}
+# {{ description | replace('\n', '\n# ') }}
+{%- endif %}
+{%- if fields|length > 1 %}
+{{ class_name }} = TypeAliasType("{{ class_name }}", Union[
+{%- for field in fields %}
+    '{{ field.name }}',
+{%- endfor %}
+]){% else %}
+{{ class_name }} = TypeAliasType("{{ class_name }}", {{ fields[0].name }}){% endif %}
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/UnionTypeStatement.jinja2 0.45.0-1/src/datamodel_code_generator/model/template/UnionTypeStatement.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/UnionTypeStatement.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/template/UnionTypeStatement.jinja2	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+{%- if description %}
+# {{ description | replace('\n', '\n# ') }}
+{%- endif %}
+{%- if fields|length > 1 %}
+type {{ class_name }} = Union[
+{%- for field in fields %}
+    '{{ field.name }}',
+{%- endfor %}
+]{% else %}
+type {{ class_name }} = {{ fields[0].name }}{% endif %}
\ No newline at end of file
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/dataclass.jinja2 0.45.0-1/src/datamodel_code_generator/model/template/dataclass.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/dataclass.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/template/dataclass.jinja2	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,50 @@
+{% for decorator in decorators -%}
+{{ decorator }}
+{% endfor -%}
+{%- set args = [] %}
+{%- for k, v in (dataclass_arguments or {}).items() %}
+    {%- if v is not none and v is not false %}
+        {%- set _ = args.append(k ~ '=' ~ (v|pprint)) %}
+    {%- endif %}
+{%- endfor %}
+{%- if args %}
+@dataclass({{ args | join(', ') }})
+{%- else %}
+@dataclass
+{%- endif %}
+{%- if base_class %}
+class {{ class_name }}({{ base_class }}):
+{%- else %}
+class {{ class_name }}:
+{%- endif %}
+{%- if description %}
+    """
+    {{ description | indent(4) }}
+    """
+{%- endif %}
+{%- if not fields and not description %}
+    pass
+{%- endif %}
+{%- for field in fields -%}
+    {%- if field.field %}
+    {{ field.name }}: {{ field.type_hint }} = {{ field.field }}
+    {%- else %}
+    {{ field.name }}: {{ field.type_hint }}
+    {%- if not (field.required or (field.represented_default == 'None' and field.strip_default_none))
+            %} = {{ field.represented_default }}
+    {%- endif -%}
+    {%- endif %}
+    {%- if field.docstring %}
+    """
+    {{ field.docstring | indent(4) }}
+    """
+{%- if field.use_inline_field_description and not loop.last %}
+
+{% endif %}
+    {%- elif field.inline_field_docstring %}
+    {{ field.inline_field_docstring }}
+{%- if not loop.last %}
+
+{% endif %}
+    {%- endif %}
+{%- endfor -%}
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/msgspec.jinja2 0.45.0-1/src/datamodel_code_generator/model/template/msgspec.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/msgspec.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/template/msgspec.jinja2	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,55 @@
+{% for decorator in decorators -%}
+{{ decorator }}
+{% endfor -%}
+{%- if base_class %}
+class {{ class_name }}({{ base_class }}{%- for key, value in (base_class_kwargs|default({})).items() -%}
+, {{ key }}={{ value }}
+{%- endfor -%}):
+{%- else %}
+class {{ class_name }}:
+{%- endif %}
+{%- if description %}
+    """
+    {{ description | indent(4) }}
+    """
+{%- endif %}
+{%- set ns = namespace(has_rendered_field=false) -%}
+{%- for field in fields -%}
+    {%- if field.extras.get('is_classvar') %}
+    {#- Skip fields with is_classvar=True - they are managed by msgspec tag_field -#}
+    {%- elif not field.annotated and field.field %}
+    {%- set ns.has_rendered_field = true %}
+    {{ field.name }}: {{ field.type_hint }} = {{ field.field }}
+    {%- else %}
+    {%- set ns.has_rendered_field = true %}
+    {%- if field.annotated and not field.field %}
+    {{ field.name }}: {{ field.annotated }}
+    {%- elif field.annotated and field.field %}
+    {{ field.name }}: {{ field.annotated }} = {{ field.field }}
+    {%- else %}
+    {{ field.name }}: {{ field.type_hint }}
+    {%- endif %}
+    {%- if not field.field and (not field.required or field.data_type.is_optional or field.nullable)
+            %} = {{ field.represented_default }}
+    {%- endif -%}
+    {%- endif %}
+
+
+
+    {%- if not field.extras.get('is_classvar') and field.docstring %}
+    """
+    {{ field.docstring | indent(4) }}
+    """
+{%- if field.use_inline_field_description and not loop.last %}
+
+{% endif %}
+    {%- elif not field.extras.get('is_classvar') and field.inline_field_docstring %}
+    {{ field.inline_field_docstring }}
+{%- if not loop.last %}
+
+{% endif %}
+    {%- endif %}
+{%- endfor -%}
+{%- if not ns.has_rendered_field and not description %}
+    pass
+{%- endif -%}
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/pydantic/BaseModel.jinja2 0.45.0-1/src/datamodel_code_generator/model/template/pydantic/BaseModel.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/pydantic/BaseModel.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/template/pydantic/BaseModel.jinja2	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,47 @@
+{% for decorator in decorators -%}
+{{ decorator }}
+{% endfor -%}
+class {{ class_name }}({{ base_class }}):{% if comment is defined %}  # {{ comment }}{% endif %}
+{%- if description %}
+    """
+    {{ description | indent(4) }}
+    """
+{%- endif %}
+{%- if not fields and not description %}
+    pass
+{%- endif %}
+{%- if config %}
+{%- filter indent(4) %}
+{% include 'Config.jinja2' %}
+{%- endfilter %}
+{%- endif %}
+{%- for field in fields -%}
+    {%- if not field.annotated and field.field %}
+    {{ field.name }}: {{ field.type_hint }} = {{ field.field }}
+    {%- else %}
+    {%- if field.annotated %}
+    {{ field.name }}: {{ field.annotated }}
+    {%- else %}
+    {{ field.name }}: {{ field.type_hint }}
+    {%- endif %}
+    {%- if not field.has_default_factory_in_field and not (field.required or (field.represented_default == 'None' and field.strip_default_none))
+            %} = {{ field.represented_default }}
+    {%- endif -%}
+    {%- endif %}
+    {%- if field.docstring %}
+    """
+    {{ field.docstring | indent(4) }}
+    """
+{%- if field.use_inline_field_description and not loop.last %}
+
+{% endif %}
+    {%- elif field.inline_field_docstring %}
+    {{ field.inline_field_docstring }}
+{%- if not loop.last %}
+
+{% endif %}
+    {%- endif %}
+{%- for method in methods -%}
+    {{ method }}
+{%- endfor -%}
+{%- endfor -%}
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2 0.45.0-1/src/datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/template/pydantic/BaseModel_root.jinja2	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,39 @@
+{% for decorator in decorators -%}
+{{ decorator }}
+{% endfor -%}
+class {{ class_name }}({{ base_class }}):{% if comment is defined %}  # {{ comment }}{% endif %}
+{%- if description %}
+    """
+    {{ description | indent(4) }}
+    """
+{%- endif %}
+{%- if config %}
+{%- filter indent(4) %}
+{% include 'Config.jinja2' %}
+{%- endfilter %}
+{%- endif %}
+{%- if not fields and not description %}
+    pass
+{%- else %}
+    {%- set field = fields[0] %}
+    {%- if not field.annotated and field.field %}
+    __root__: {{ field.type_hint }} = {{ field.field }}
+    {%- else %}
+    {%- if field.annotated %}
+    __root__: {{ field.annotated }}
+    {%- else %}
+    __root__: {{ field.type_hint }}
+    {%- endif %}
+    {%- if not field.has_default_factory_in_field and not (field.required or (field.represented_default == 'None' and field.strip_default_none))
+            %} = {{ field.represented_default }}
+    {%- endif -%}
+    {%- endif %}
+    {%- if field.docstring %}
+    """
+    {{ field.docstring | indent(4) }}
+    """
+    {%- elif field.inline_field_docstring %}
+    {{ field.inline_field_docstring }}
+
+    {%- endif %}
+{%- endif %}
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/pydantic/Config.jinja2 0.45.0-1/src/datamodel_code_generator/model/template/pydantic/Config.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/pydantic/Config.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/template/pydantic/Config.jinja2	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,4 @@
+class Config:
+{%- for field_name, value in config.dict(exclude_unset=True).items() %}
+    {{ field_name }} = {{ value }}
+{%- endfor %}
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/pydantic/dataclass.jinja2 0.45.0-1/src/datamodel_code_generator/model/template/pydantic/dataclass.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/pydantic/dataclass.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/template/pydantic/dataclass.jinja2	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,37 @@
+{% for decorator in decorators -%}
+{{ decorator }}
+{% endfor -%}
+@dataclass
+{%- if base_class %}
+class {{ class_name }}({{ base_class }}):
+{%- else %}
+class {{ class_name }}:
+{%- endif %}
+{%- if description %}
+    """
+    {{ description | indent(4) }}
+    """
+{%- endif %}
+{%- if not fields %}
+    pass
+{%- endif %}
+{%- for field in fields -%}
+    {%- if field.default %}
+    {{ field.name }}: {{ field.type_hint }} = {{field.default}}
+    {%- else %}
+    {{ field.name }}: {{ field.type_hint }}
+    {%- endif %}
+    {%- if field.docstring %}
+    """
+    {{ field.docstring | indent(4) }}
+    """
+{%- if field.use_inline_field_description and not loop.last %}
+
+{% endif %}
+    {%- elif field.inline_field_docstring %}
+    {{ field.inline_field_docstring }}
+{%- if not loop.last %}
+
+{% endif %}
+    {%- endif %}
+{%- endfor -%}
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2 0.45.0-1/src/datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/template/pydantic_v2/BaseModel.jinja2	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,57 @@
+{% if base_class != "BaseModel" and "," not in base_class and not fields and not config and not description -%}
+
+{# If this is just going to be `class Foo(Bar): pass`, we might as well make Foo
+an alias for Bar: every pydantic model class consumes considerable memory. #}
+{{ class_name }} = {{ base_class }}
+
+{% else -%}
+
+{% for decorator in decorators -%}
+{{ decorator }}
+{% endfor -%}
+class {{ class_name }}({{ base_class }}):{% if comment is defined %}  # {{ comment }}{% endif %}
+{%- if description %}
+    """
+    {{ description | indent(4) }}
+    """
+{%- endif %}
+{%- if not fields and not description %}
+    pass
+{%- endif %}
+{%- if config %}
+{%- filter indent(4) %}
+{% include 'ConfigDict.jinja2' %}
+{%- endfilter %}
+{%- endif %}
+{%- for field in fields %}
+    {%- if not field.annotated and field.field %}
+    {{ field.name }}: {{ field.type_hint }} = {{ field.field }}
+    {%- else %}
+    {%- if field.annotated %}
+    {{ field.name }}: {{ field.annotated }}
+    {%- else %}
+    {{ field.name }}: {{ field.type_hint }}
+    {%- endif %}
+    {%- if not field.has_default_factory_in_field and (not (field.required or (field.represented_default == 'None' and field.strip_default_none)) or field.data_type.is_optional)
+            %} = {{ field.represented_default }}
+    {%- endif -%}
+    {%- endif %}
+    {%- if field.docstring %}
+    """
+    {{ field.docstring | indent(4) }}
+    """
+{%- if field.use_inline_field_description and not loop.last %}
+
+{% endif %}
+    {%- elif field.inline_field_docstring %}
+    {{ field.inline_field_docstring }}
+{%- if not loop.last %}
+
+{% endif %}
+    {%- endif %}
+{%- for method in methods -%}
+    {{ method }}
+{%- endfor -%}
+{%- endfor -%}
+
+{%- endif %}
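
The alias shortcut in the first branch above is a size and speed optimization; a
sketch of the two shapes it chooses between (model names invented):

    from pydantic import BaseModel

    class Bar(BaseModel):
        x: int

    Foo = Bar            # what the template emits: no new class is built

    class Baz(Bar):      # what `class Foo(Bar): pass` would build instead:
        pass             # a second model class with its own schema and validators

    print(Foo is Bar)    # True - the alias costs nothing
    print(Baz is Bar)    # False
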
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2 0.45.0-1/src/datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/template/pydantic_v2/ConfigDict.jinja2	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,5 @@
+model_config = ConfigDict(
+{%- for field_name, value in config.dict(exclude_unset=True).items() %}
+    {{ field_name }}={{ value }},
+{%- endfor %}
+)
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2 0.45.0-1/src/datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2
--- 0.26.4-3/src/datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/template/pydantic_v2/RootModel.jinja2	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,48 @@
+{%- macro get_type_hint(_fields) -%}
+{%- if _fields -%}
+{# There will only ever be a single field for RootModel. #}
+{{- _fields[0].type_hint}}
+{%- endif -%}
+{%- endmacro -%}
+
+
+{% for decorator in decorators -%}
+{{ decorator }}
+{% endfor -%}
+
+class {{ class_name }}({{ base_class }}{%- if fields -%}[{{get_type_hint(fields)}}]{%- endif -%}):{% if comment is defined %}  # {{ comment }}{% endif %}
+{%- if description %}
+    """
+    {{ description | indent(4) }}
+    """
+{%- endif %}
+{%- if config %}
+{%- filter indent(4) %}
+{% include 'ConfigDict.jinja2' %}
+{%- endfilter %}
+{%- endif %}
+{%- if not fields and not description %}
+    pass
+{%- else %}
+    {%- set field = fields[0] %}
+    {%- if not field.annotated and field.field %}
+    root: {{ field.type_hint }} = {{ field.field }}
+    {%- else %}
+    {%- if field.annotated %}
+    root: {{ field.annotated }}
+    {%- else %}
+    root: {{ field.type_hint }}
+    {%- endif %}
+    {%- if not field.has_default_factory_in_field and not (field.required or (field.represented_default == 'None' and field.strip_default_none))
+            %} = {{ field.represented_default }}
+    {%- endif -%}
+    {%- endif %}
+    {%- if field.docstring %}
+    """
+    {{ field.docstring | indent(4) }}
+    """
+    {%- elif field.inline_field_docstring %}
+    {{ field.inline_field_docstring }}
+
+    {%- endif %}
+{%- endif %}
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/type_alias.py 0.45.0-1/src/datamodel_code_generator/model/type_alias.py
--- 0.26.4-3/src/datamodel_code_generator/model/type_alias.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/type_alias.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,70 @@
+"""Type alias model generators.
+
+Provides classes for generating type aliases using different Python syntax:
+TypeAlias annotation, TypeAliasType, and type statement (Python 3.12+).
+"""
+
+from __future__ import annotations
+
+from typing import ClassVar
+
+from datamodel_code_generator.imports import (
+    IMPORT_ANNOTATED,
+    IMPORT_TYPE_ALIAS,
+    IMPORT_TYPE_ALIAS_BACKPORT,
+    IMPORT_TYPE_ALIAS_TYPE,
+    Import,
+)
+from datamodel_code_generator.model import DataModel
+from datamodel_code_generator.types import chain_as_tuple
+
+
+class TypeAliasBase(DataModel):
+    """Base class for all type alias implementations."""
+
+    IS_ALIAS: bool = True
+
+    @property
+    def imports(self) -> tuple[Import, ...]:
+        """Get imports including Annotated if needed."""
+        imports = super().imports
+        if self.fields and (self.fields[0].annotated or self.fields[0].field):
+            imports = chain_as_tuple(imports, (IMPORT_ANNOTATED,))
+
+        return imports
+
+
+class TypeAlias(TypeAliasBase):
+    """TypeAlias annotation for Python 3.10+ (Name: TypeAlias = type)."""
+
+    TEMPLATE_FILE_PATH: ClassVar[str] = "TypeAliasAnnotation.jinja2"
+    BASE_CLASS: ClassVar[str] = ""
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_TYPE_ALIAS,)
+
+
+class TypeAliasBackport(TypeAliasBase):
+    """TypeAlias annotation for Python 3.9 (Name: TypeAlias = type) using typing_extensions."""
+
+    TEMPLATE_FILE_PATH: ClassVar[str] = "TypeAliasAnnotation.jinja2"
+    BASE_CLASS: ClassVar[str] = ""
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_TYPE_ALIAS_BACKPORT,)
+
+
+class TypeAliasTypeBackport(TypeAliasBase):
+    """TypeAliasType for Python 3.9-3.11 (Name = TypeAliasType("Name", type))."""
+
+    TEMPLATE_FILE_PATH: ClassVar[str] = "TypeAliasType.jinja2"
+    BASE_CLASS: ClassVar[str] = ""
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_TYPE_ALIAS_TYPE,)
+
+
+class TypeStatement(TypeAliasBase):
+    """Type statement for Python 3.12+ (type Name = type).
+
+    Note: Python 3.12+ type statements use deferred evaluation,
+    so forward references don't need to be quoted.
+    """
+
+    TEMPLATE_FILE_PATH: ClassVar[str] = "TypeStatement.jinja2"
+    BASE_CLASS: ClassVar[str] = ""
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = ()
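
The deferred-evaluation note on TypeStatement can be observed directly on
Python 3.12+: the right-hand side of a `type` statement is only evaluated when
the alias value is accessed, so forward references need no quoting (names
invented):

    type Pair = tuple[Node, Node]   # Node is defined below - no quotes needed

    class Node:
        pass

    print(Pair.__value__)           # evaluated lazily on first access
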
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/typed_dict.py 0.45.0-1/src/datamodel_code_generator/model/typed_dict.py
--- 0.26.4-3/src/datamodel_code_generator/model/typed_dict.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/typed_dict.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,161 @@
+"""TypedDict model generator.
+
+Generates Python TypedDict classes for use with type checkers.
+"""
+
+from __future__ import annotations
+
+import keyword
+from typing import TYPE_CHECKING, Any, ClassVar
+
+from datamodel_code_generator.model import DataModel, DataModelFieldBase
+from datamodel_code_generator.model.base import UNDEFINED
+from datamodel_code_generator.model.imports import (
+    IMPORT_NOT_REQUIRED,
+    IMPORT_NOT_REQUIRED_BACKPORT,
+    IMPORT_TYPED_DICT,
+)
+from datamodel_code_generator.types import NOT_REQUIRED_PREFIX
+
+if TYPE_CHECKING:
+    from collections import defaultdict
+    from collections.abc import Iterator
+    from pathlib import Path
+
+    from datamodel_code_generator.imports import Import
+    from datamodel_code_generator.reference import Reference
+
+
+escape_characters = str.maketrans({
+    "\\": r"\\",
+    "'": r"\'",
+    "\b": r"\b",
+    "\f": r"\f",
+    "\n": r"\n",
+    "\r": r"\r",
+    "\t": r"\t",
+})
+
+
+def _is_valid_field_name(field: DataModelFieldBase) -> bool:
+    name = field.original_name or field.name
+    if name is None:  # pragma: no cover
+        return False
+    return name.isidentifier() and not keyword.iskeyword(name)
+
+
+class TypedDict(DataModel):
+    """DataModel implementation for Python TypedDict."""
+
+    TEMPLATE_FILE_PATH: ClassVar[str] = "TypedDict.jinja2"
+    BASE_CLASS: ClassVar[str] = "typing.TypedDict"
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_TYPED_DICT,)
+
+    def __init__(  # noqa: PLR0913
+        self,
+        *,
+        reference: Reference,
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
+        methods: list[str] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
+        default: Any = UNDEFINED,
+        nullable: bool = False,
+        keyword_only: bool = False,
+        treat_dot_as_module: bool = False,
+    ) -> None:
+        """Initialize TypedDict model."""
+        super().__init__(
+            reference=reference,
+            fields=fields,
+            decorators=decorators,
+            base_classes=base_classes,
+            custom_base_class=custom_base_class,
+            custom_template_dir=custom_template_dir,
+            extra_template_data=extra_template_data,
+            methods=methods,
+            path=path,
+            description=description,
+            default=default,
+            nullable=nullable,
+            keyword_only=keyword_only,
+            treat_dot_as_module=treat_dot_as_module,
+        )
+
+    @property
+    def is_functional_syntax(self) -> bool:
+        """Check if TypedDict requires functional syntax."""
+        return any(not _is_valid_field_name(f) for f in self.fields)
+
+    @property
+    def all_fields(self) -> Iterator[DataModelFieldBase]:
+        """Iterate over all fields including inherited ones."""
+        yield from self.iter_all_fields()
+
+    def render(self, *, class_name: str | None = None) -> str:
+        """Render TypedDict class with appropriate syntax."""
+        return self._render(
+            class_name=class_name or self.class_name,
+            fields=self.fields,
+            decorators=self.decorators,
+            base_class=self.base_class,
+            methods=self.methods,
+            description=self.description,
+            is_functional_syntax=self.is_functional_syntax,
+            all_fields=self.all_fields,
+            **self.extra_template_data,
+        )
+
+
+class DataModelField(DataModelFieldBase):
+    """Field implementation for TypedDict models."""
+
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_NOT_REQUIRED,)
+
+    def process_const(self) -> None:
+        """Process const field constraint using literal type."""
+        self._process_const_as_literal()
+
+    @property
+    def key(self) -> str:
+        """Get escaped field key for TypedDict."""
+        return (self.original_name or self.name or "").translate(  # pragma: no cover
+            escape_characters
+        )
+
+    @property
+    def type_hint(self) -> str:
+        """Get type hint with NotRequired wrapper if needed."""
+        type_hint = super().type_hint
+        if self._not_required:
+            return f"{NOT_REQUIRED_PREFIX}{type_hint}]"
+        return type_hint
+
+    @property
+    def _not_required(self) -> bool:
+        """Check if field should be marked as NotRequired."""
+        return not self.required and isinstance(self.parent, TypedDict)
+
+    @property
+    def fall_back_to_nullable(self) -> bool:
+        """Check if field should fall back to nullable."""
+        return not self._not_required
+
+    @property
+    def imports(self) -> tuple[Import, ...]:
+        """Get imports including NotRequired if needed."""
+        return (
+            *super().imports,
+            *(self.DEFAULT_IMPORTS if self._not_required else ()),
+        )
+
+
+class DataModelFieldBackport(DataModelField):
+    """Field implementation for TypedDict models using typing_extensions."""
+
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_NOT_REQUIRED_BACKPORT,)
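
The NotRequired wrapping applied by DataModelField.type_hint corresponds to this
standard pattern (field names invented; typing_extensions provides the backport
used by DataModelFieldBackport):

    from typing_extensions import NotRequired, TypedDict  # typing.* on Python 3.11+

    class User(TypedDict):
        id: int
        nickname: NotRequired[str]   # how a non-required field is emitted

    user: User = {"id": 1}           # valid: "nickname" may be omitted
    print(user)
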
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/types.py 0.45.0-1/src/datamodel_code_generator/model/types.py
--- 0.26.4-3/src/datamodel_code_generator/model/types.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/types.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,106 @@
+"""Base type manager for model modules.
+
+Provides DataTypeManager implementation with type mapping factory.
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+from datamodel_code_generator import DatetimeClassType, PythonVersion, PythonVersionMin
+from datamodel_code_generator.imports import (
+    IMPORT_ANY,
+    IMPORT_DECIMAL,
+    IMPORT_TIMEDELTA,
+)
+from datamodel_code_generator.types import DataType, StrictTypes, Types
+from datamodel_code_generator.types import DataTypeManager as _DataTypeManager
+
+if TYPE_CHECKING:
+    from collections.abc import Sequence
+
+
+def type_map_factory(data_type: type[DataType]) -> dict[Types, DataType]:
+    """Create type mapping for common schema types to Python types."""
+    data_type_int = data_type(type="int")
+    data_type_float = data_type(type="float")
+    data_type_str = data_type(type="str")
+    return {
+        # TODO: Should we support a special type such as UUID?
+        Types.integer: data_type_int,
+        Types.int32: data_type_int,
+        Types.int64: data_type_int,
+        Types.number: data_type_float,
+        Types.float: data_type_float,
+        Types.double: data_type_float,
+        Types.decimal: data_type.from_import(IMPORT_DECIMAL),
+        Types.time: data_type_str,
+        Types.string: data_type_str,
+        Types.byte: data_type_str,  # base64 encoded string
+        Types.binary: data_type(type="bytes"),
+        Types.date: data_type_str,
+        Types.date_time: data_type_str,
+        Types.timedelta: data_type.from_import(IMPORT_TIMEDELTA),
+        Types.password: data_type_str,
+        Types.email: data_type_str,
+        Types.uuid: data_type_str,
+        Types.uuid1: data_type_str,
+        Types.uuid2: data_type_str,
+        Types.uuid3: data_type_str,
+        Types.uuid4: data_type_str,
+        Types.uuid5: data_type_str,
+        Types.uri: data_type_str,
+        Types.hostname: data_type_str,
+        Types.ipv4: data_type_str,
+        Types.ipv6: data_type_str,
+        Types.ipv4_network: data_type_str,
+        Types.ipv6_network: data_type_str,
+        Types.boolean: data_type(type="bool"),
+        Types.object: data_type.from_import(IMPORT_ANY, is_dict=True),
+        Types.null: data_type(type="None"),
+        Types.array: data_type.from_import(IMPORT_ANY, is_list=True),
+        Types.any: data_type.from_import(IMPORT_ANY),
+    }
+
+
+class DataTypeManager(_DataTypeManager):
+    """Base type manager for model modules."""
+
+    def __init__(  # noqa: PLR0913, PLR0917
+        self,
+        python_version: PythonVersion = PythonVersionMin,
+        use_standard_collections: bool = False,  # noqa: FBT001, FBT002
+        use_generic_container_types: bool = False,  # noqa: FBT001, FBT002
+        strict_types: Sequence[StrictTypes] | None = None,
+        use_non_positive_negative_number_constrained_types: bool = False,  # noqa: FBT001, FBT002
+        use_decimal_for_multiple_of: bool = False,  # noqa: FBT001, FBT002
+        use_union_operator: bool = False,  # noqa: FBT001, FBT002
+        use_pendulum: bool = False,  # noqa: FBT001, FBT002
+        target_datetime_class: DatetimeClassType | None = None,
+        treat_dot_as_module: bool = False,  # noqa: FBT001, FBT002
+        use_serialize_as_any: bool = False,  # noqa: FBT001, FBT002
+    ) -> None:
+        """Initialize type manager with basic type mapping."""
+        super().__init__(
+            python_version,
+            use_standard_collections,
+            use_generic_container_types,
+            strict_types,
+            use_non_positive_negative_number_constrained_types,
+            use_decimal_for_multiple_of,
+            use_union_operator,
+            use_pendulum,
+            target_datetime_class,
+            treat_dot_as_module,
+            use_serialize_as_any,
+        )
+
+        self.type_map: dict[Types, DataType] = type_map_factory(self.data_type)
+
+    def get_data_type(
+        self,
+        types: Types,
+        **_: Any,
+    ) -> DataType:
+        """Get data type for schema type."""
+        return self.type_map[types]
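
Functionally, type_map_factory collapses the many schema primitive names onto a
few Python types; a simplified sketch of its effect, with string stand-ins
instead of DataType instances:

    mapping = {
        "integer": "int", "int32": "int", "int64": "int",   # all integer formats -> int
        "number": "float", "float": "float", "double": "float",
        "date": "str", "date_time": "str",                  # plain str in this base manager
        "binary": "bytes", "boolean": "bool",
    }
    print(mapping["int64"], mapping["date_time"])           # int str
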
diff -pruN 0.26.4-3/src/datamodel_code_generator/model/union.py 0.45.0-1/src/datamodel_code_generator/model/union.py
--- 0.26.4-3/src/datamodel_code_generator/model/union.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/model/union.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,105 @@
+"""Union type model generators.
+
+Provides classes for generating union type aliases for GraphQL union types.
+"""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, ClassVar
+
+from datamodel_code_generator.imports import (
+    IMPORT_TYPE_ALIAS,
+    IMPORT_TYPE_ALIAS_BACKPORT,
+    IMPORT_TYPE_ALIAS_TYPE,
+    IMPORT_UNION,
+    Import,
+)
+from datamodel_code_generator.model import DataModel, DataModelFieldBase
+from datamodel_code_generator.model.base import UNDEFINED
+
+if TYPE_CHECKING:
+    from collections import defaultdict
+    from pathlib import Path
+
+    from datamodel_code_generator.reference import Reference
+
+
+class _DataTypeUnionBase(DataModel):
+    """Base class for GraphQL union types with shared __init__ logic."""
+
+    def __init__(  # noqa: PLR0913
+        self,
+        *,
+        reference: Reference,
+        fields: list[DataModelFieldBase],
+        decorators: list[str] | None = None,
+        base_classes: list[Reference] | None = None,
+        custom_base_class: str | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
+        methods: list[str] | None = None,
+        path: Path | None = None,
+        description: str | None = None,
+        default: Any = UNDEFINED,
+        nullable: bool = False,
+        keyword_only: bool = False,
+        treat_dot_as_module: bool = False,
+    ) -> None:
+        """Initialize GraphQL union type."""
+        super().__init__(
+            reference=reference,
+            fields=fields,
+            decorators=decorators,
+            base_classes=base_classes,
+            custom_base_class=custom_base_class,
+            custom_template_dir=custom_template_dir,
+            extra_template_data=extra_template_data,
+            methods=methods,
+            path=path,
+            description=description,
+            default=default,
+            nullable=nullable,
+            keyword_only=keyword_only,
+            treat_dot_as_module=treat_dot_as_module,
+        )
+
+
+class DataTypeUnion(_DataTypeUnionBase):
+    """GraphQL union using TypeAlias annotation for Python 3.10+ (Name: TypeAlias = Union[...])."""
+
+    TEMPLATE_FILE_PATH: ClassVar[str] = "UnionTypeAliasAnnotation.jinja2"
+    BASE_CLASS: ClassVar[str] = ""
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (
+        IMPORT_TYPE_ALIAS,
+        IMPORT_UNION,
+    )
+
+
+class DataTypeUnionBackport(_DataTypeUnionBase):
+    """GraphQL union using TypeAlias annotation for Python 3.9 (Name: TypeAlias = Union[...])."""
+
+    TEMPLATE_FILE_PATH: ClassVar[str] = "UnionTypeAliasAnnotation.jinja2"
+    BASE_CLASS: ClassVar[str] = ""
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (
+        IMPORT_TYPE_ALIAS_BACKPORT,
+        IMPORT_UNION,
+    )
+
+
+class DataTypeUnionTypeBackport(_DataTypeUnionBase):
+    """GraphQL union using TypeAliasType for Python 3.9-3.11 (Name = TypeAliasType("Name", Union[...]))."""
+
+    TEMPLATE_FILE_PATH: ClassVar[str] = "UnionTypeAliasType.jinja2"
+    BASE_CLASS: ClassVar[str] = ""
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (
+        IMPORT_TYPE_ALIAS_TYPE,
+        IMPORT_UNION,
+    )
+
+
+class DataTypeUnionTypeStatement(_DataTypeUnionBase):
+    """GraphQL union using type statement for Python 3.12+ (type Name = Union[...])."""
+
+    TEMPLATE_FILE_PATH: ClassVar[str] = "UnionTypeStatement.jinja2"
+    BASE_CLASS: ClassVar[str] = ""
+    DEFAULT_IMPORTS: ClassVar[tuple[Import, ...]] = (IMPORT_UNION,)
diff -pruN 0.26.4-3/src/datamodel_code_generator/parser/__init__.py 0.45.0-1/src/datamodel_code_generator/parser/__init__.py
--- 0.26.4-3/src/datamodel_code_generator/parser/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/parser/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,49 @@
+"""Parser utilities and base types for schema parsing.
+
+Provides LiteralType enum for literal parsing options and DefaultPutDict
+for caching remote schema content.
+"""
+
+from __future__ import annotations
+
+from collections import UserDict
+from enum import Enum
+from typing import Callable, TypeVar
+
+TK = TypeVar("TK")
+TV = TypeVar("TV")
+
+
+class LiteralType(Enum):
+    """Options for handling enum fields as literals."""
+
+    All = "all"
+    One = "one"
+
+
+class DefaultPutDict(UserDict[TK, TV]):
+    """Dict that can lazily compute and cache missing values."""
+
+    def get_or_put(
+        self,
+        key: TK,
+        default: TV | None = None,
+        default_factory: Callable[[TK], TV] | None = None,
+    ) -> TV:
+        """Get value for key, or compute and store it if missing."""
+        if key in self:
+            return self[key]
+        if default is not None:  # pragma: no cover
+            value = self[key] = default
+            return value
+        if default_factory:
+            value = self[key] = default_factory(key)
+            return value
+        msg = "Not found default and default_factory"  # pragma: no cover
+        raise ValueError(msg)  # pragma: no cover
+
+
+__all__ = [
+    "DefaultPutDict",
+    "LiteralType",
+]
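
Typical use of get_or_put (the URL and factory are invented; the import path
matches the module above):

    from datamodel_code_generator.parser import DefaultPutDict

    cache: DefaultPutDict[str, str] = DefaultPutDict()
    body = cache.get_or_put(
        "https://example.com/schema.json",
        default_factory=lambda url: f"fetched:{url}",  # called only on a cache miss
    )
    print(body)                                        # fetched:https://example.com/schema.json
    print("https://example.com/schema.json" in cache)  # True - the value is now cached
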
diff -pruN 0.26.4-3/src/datamodel_code_generator/parser/_graph.py 0.45.0-1/src/datamodel_code_generator/parser/_graph.py
--- 0.26.4-3/src/datamodel_code_generator/parser/_graph.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/parser/_graph.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,67 @@
+"""Graph utilities used by parsers.
+
+This module intentionally contains only generic graph algorithms (no DataModel
+or schema-specific logic), so it can be reused across parsers without creating
+dependency cycles.
+"""
+
+from __future__ import annotations
+
+from collections.abc import Callable, Hashable
+from heapq import heappop, heappush
+from typing import TypeVar
+
+TNode = TypeVar("TNode", bound=Hashable)
+
+
+def stable_toposort(
+    nodes: list[TNode],
+    edges: dict[TNode, set[TNode]],
+    *,
+    key: Callable[[TNode], int],
+) -> list[TNode]:
+    """Stable topological sort; breaks ties by `key`.
+
+    The `edges` mapping is an adjacency list where `edges[u]` contains all `v`
+    such that `u -> v` (i.e., `u` must come before `v`).
+
+    If a cycle is detected, any remaining nodes are appended in `key` order for
+    determinism.
+    """
+    node_set = set(nodes)
+    order_index = {node: index for index, node in enumerate(nodes)}
+    indegree: dict[TNode, int] = dict.fromkeys(nodes, 0)
+    outgoing: dict[TNode, set[TNode]] = {n: set() for n in nodes}
+
+    for source in node_set & edges.keys():
+        destinations = edges[source]
+        new_destinations = destinations & node_set - outgoing[source]
+        outgoing[source].update(new_destinations)
+        for destination in new_destinations:
+            indegree[destination] += 1
+
+    outgoing_sorted = {
+        node: sorted(neighbors, key=lambda neighbor: (key(neighbor), order_index[neighbor]))
+        for node, neighbors in outgoing.items()
+    }
+
+    ready: list[tuple[int, int, TNode]] = []
+    for node in nodes:
+        if indegree[node] == 0:
+            heappush(ready, (key(node), order_index[node], node))
+
+    result: list[TNode] = []
+    while ready:
+        _, _, node = heappop(ready)
+        result.append(node)
+        for neighbor in outgoing_sorted[node]:
+            indegree[neighbor] -= 1
+            if indegree[neighbor] == 0:
+                heappush(ready, (key(neighbor), order_index[neighbor], neighbor))
+
+    remaining = sorted(
+        [node for node in nodes if node not in result],
+        key=lambda node: (key(node), order_index[node]),
+    )
+    result.extend(remaining)
+    return result
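
A small worked example of stable_toposort (node names invented; with a constant
key, ties fall back to input order):

    from datamodel_code_generator.parser._graph import stable_toposort

    nodes = ["b", "a", "c"]
    edges = {"a": {"c"}, "b": {"c"}}   # a -> c and b -> c: c must come last
    print(stable_toposort(nodes, edges, key=lambda node: 0))
    # ['b', 'a', 'c'] - 'b' stays ahead of 'a' because ties preserve input order
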
diff -pruN 0.26.4-3/src/datamodel_code_generator/parser/_scc.py 0.45.0-1/src/datamodel_code_generator/parser/_scc.py
--- 0.26.4-3/src/datamodel_code_generator/parser/_scc.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/parser/_scc.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,171 @@
+"""Strongly Connected Components detection using Tarjan's algorithm.
+
+Provides SCC detection for module dependency graphs to identify
+circular import patterns in generated code.
+"""
+
+from __future__ import annotations
+
+from enum import IntEnum
+from typing import NamedTuple
+
+from typing_extensions import TypeAlias
+
+ModulePath: TypeAlias = tuple[str, ...]
+ModuleGraph: TypeAlias = dict[ModulePath, set[ModulePath]]
+SCC: TypeAlias = set[ModulePath]
+SCCList: TypeAlias = list[SCC]
+
+_EMPTY_SET: frozenset[ModulePath] = frozenset()
+
+
+class _Phase(IntEnum):
+    """DFS traversal phase for iterative Tarjan's algorithm."""
+
+    VISIT = 0
+    POSTVISIT = 1
+
+
+class _Frame(NamedTuple):
+    """Call stack frame for iterative DFS."""
+
+    node: ModulePath
+    neighbor_idx: int
+    phase: _Phase
+
+
+class _TarjanState:
+    """Mutable state for Tarjan's SCC algorithm."""
+
+    __slots__ = ("graph", "index", "index_counter", "lowlinks", "on_stack", "result", "sorted_cache", "stack")
+
+    def __init__(self, graph: ModuleGraph) -> None:
+        self.graph = graph
+        self.index_counter: int = 0
+        self.stack: list[ModulePath] = []
+        self.lowlinks: dict[ModulePath, int] = {}
+        self.index: dict[ModulePath, int] = {}
+        self.on_stack: set[ModulePath] = set()
+        self.result: SCCList = []
+        self.sorted_cache: dict[ModulePath, list[ModulePath]] = {}
+
+    def get_sorted_neighbors(self, node: ModulePath) -> list[ModulePath]:
+        """Get sorted neighbors with lazy memoization."""
+        cached: list[ModulePath] | None = self.sorted_cache.get(node)
+        if cached is None:
+            cached = sorted(self.graph.get(node, _EMPTY_SET))
+            self.sorted_cache[node] = cached
+        return cached
+
+    def extract_scc(self, root: ModulePath) -> None:
+        """Pop nodes from stack to form an SCC rooted at the given node."""
+        scc: SCC = set()
+        while True:
+            w: ModulePath = self.stack.pop()
+            self.on_stack.remove(w)
+            scc.add(w)
+            if w == root:  # pragma: no branch
+                break
+        self.result.append(scc)
+
+    def initialize_node(self, node: ModulePath) -> None:
+        """Initialize a node for DFS traversal."""
+        self.index[node] = self.lowlinks[node] = self.index_counter
+        self.index_counter += 1
+        self.stack.append(node)
+        self.on_stack.add(node)
+
+
+def _strongconnect(state: _TarjanState, start: ModulePath) -> None:
+    """Execute Tarjan's strongconnect algorithm iteratively."""
+    state.initialize_node(start)
+    call_stack: list[_Frame] = [_Frame(start, 0, _Phase.VISIT)]
+
+    while call_stack:
+        frame: _Frame = call_stack.pop()
+        node: ModulePath = frame.node
+        neighbors: list[ModulePath] = state.get_sorted_neighbors(node)
+        neighbor_idx: int = frame.neighbor_idx
+
+        # Handle post-visit: update lowlink from child
+        if frame.phase == _Phase.POSTVISIT:
+            child: ModulePath = neighbors[neighbor_idx]
+            state.lowlinks[node] = min(state.lowlinks[node], state.lowlinks[child])
+            neighbor_idx += 1
+
+        # Process remaining neighbors
+        while neighbor_idx < len(neighbors):
+            w: ModulePath = neighbors[neighbor_idx]
+
+            if w not in state.index:
+                # Save state for post-visit
+                call_stack.append(_Frame(node, neighbor_idx, _Phase.POSTVISIT))
+                # Initialize and push unvisited neighbor
+                state.initialize_node(w)
+                call_stack.append(_Frame(w, 0, _Phase.VISIT))
+                break
+            if w in state.on_stack:
+                state.lowlinks[node] = min(state.lowlinks[node], state.index[w])
+
+            neighbor_idx += 1
+        else:
+            # All neighbors processed: check if node is SCC root
+            if state.lowlinks[node] == state.index[node]:
+                state.extract_scc(node)
+
+
+def strongly_connected_components(graph: ModuleGraph) -> SCCList:
+    """Find all strongly connected components using Tarjan's algorithm.
+
+    Uses an iterative approach to avoid Python recursion limits on large graphs.
+    Neighbors are lazily sorted and memoized for determinism with O(E log V) cost.
+
+    Args:
+        graph: Adjacency list mapping module tuple to set of dependency module tuples.
+               Each node is a tuple like ("pkg", "__init__.py") or ("pkg", "module.py").
+
+    Returns:
+        List of all SCCs, each being a set of module tuples.
+        SCCs are returned in reverse topological order (leaves first).
+        Includes all SCCs, including singleton nodes without self-loops.
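+
+    Example (illustrative sketch, editor's addition): a two-module import
+    cycle collapses into one SCC, which is reported before the singleton:
+
+        >>> graph = {
+        ...     ("pkg", "a.py"): {("pkg", "b.py")},
+        ...     ("pkg", "b.py"): {("pkg", "a.py")},
+        ...     ("pkg", "c.py"): set(),
+        ... }
+        >>> strongly_connected_components(graph) == [
+        ...     {("pkg", "a.py"), ("pkg", "b.py")},
+        ...     {("pkg", "c.py")},
+        ... ]
+        True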
+    """
+    # Collect all nodes (including those only referenced as edges)
+    all_nodes: set[ModulePath] = set(graph.keys())
+    for neighbors in graph.values():
+        all_nodes.update(neighbors)
+
+    state = _TarjanState(graph)
+
+    # Run algorithm on all unvisited nodes (sorted for determinism)
+    for node in sorted(all_nodes):
+        if node not in state.index:
+            _strongconnect(state, node)
+
+    return state.result
+
+
+def find_circular_sccs(graph: ModuleGraph) -> SCCList:
+    """Find SCCs that represent circular dependencies.
+
+    A circular SCC is one with:
+    - More than one node, OR
+    - Exactly one node with a self-loop (edge to itself)
+
+    Args:
+        graph: Module dependency graph
+
+    Returns:
+        List of circular SCCs, sorted by their minimum element for determinism
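+
+    Example (illustrative sketch, editor's addition): only the self-loop
+    on ("a",) counts as circular; the singleton ("b",) is filtered out:
+
+        >>> find_circular_sccs({("a",): {("a",)}, ("b",): {("a",)}}) == [{("a",)}]
+        True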
+    """
+    all_sccs: SCCList = strongly_connected_components(graph)
+    circular: SCCList = []
+
+    for scc in all_sccs:
+        if len(scc) > 1:
+            circular.append(scc)
+        elif len(scc) == 1:  # pragma: no branch
+            node: ModulePath = next(iter(scc))
+            if node in graph and node in graph[node]:
+                circular.append(scc)
+
+    return sorted(circular, key=min)
diff -pruN 0.26.4-3/src/datamodel_code_generator/parser/base.py 0.45.0-1/src/datamodel_code_generator/parser/base.py
--- 0.26.4-3/src/datamodel_code_generator/parser/base.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/parser/base.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,2700 @@
+"""Abstract base parser and utilities for schema parsing.
+
+Provides the Parser abstract base class that defines the parsing algorithm,
+along with helper functions for model sorting, import resolution, and
+code generation.
+"""
+
+from __future__ import annotations
+
+import operator
+import os.path
+import re
+import sys
+from abc import ABC, abstractmethod
+from collections import Counter, OrderedDict, defaultdict
+from collections.abc import Hashable, Sequence
+from itertools import groupby
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, Callable, NamedTuple, Optional, Protocol, TypeVar, cast, runtime_checkable
+from urllib.parse import ParseResult
+from warnings import warn
+
+from pydantic import BaseModel
+from typing_extensions import TypeAlias
+
+from datamodel_code_generator import (
+    DEFAULT_SHARED_MODULE_NAME,
+    AllExportsCollisionStrategy,
+    AllExportsScope,
+    AllOfMergeMode,
+    Error,
+    ModuleSplitMode,
+    ReadOnlyWriteOnlyModelType,
+    ReuseScope,
+)
+from datamodel_code_generator.format import (
+    DEFAULT_FORMATTERS,
+    CodeFormatter,
+    DatetimeClassType,
+    Formatter,
+    PythonVersion,
+    PythonVersionMin,
+)
+from datamodel_code_generator.imports import (
+    IMPORT_ANNOTATIONS,
+    IMPORT_LITERAL,
+    IMPORT_OPTIONAL,
+    IMPORT_UNION,
+    Import,
+    Imports,
+)
+from datamodel_code_generator.model import dataclass as dataclass_model
+from datamodel_code_generator.model import msgspec as msgspec_model
+from datamodel_code_generator.model import pydantic as pydantic_model
+from datamodel_code_generator.model import pydantic_v2 as pydantic_model_v2
+from datamodel_code_generator.model.base import (
+    ALL_MODEL,
+    UNDEFINED,
+    BaseClassDataType,
+    ConstraintsBase,
+    DataModel,
+    DataModelFieldBase,
+    WrappedDefault,
+)
+from datamodel_code_generator.model.enum import Enum, Member
+from datamodel_code_generator.model.type_alias import TypeAliasBase, TypeStatement
+from datamodel_code_generator.parser import DefaultPutDict, LiteralType
+from datamodel_code_generator.parser._graph import stable_toposort
+from datamodel_code_generator.parser._scc import find_circular_sccs, strongly_connected_components
+from datamodel_code_generator.reference import ModelResolver, ModelType, Reference
+from datamodel_code_generator.types import DataType, DataTypeManager, StrictTypes
+from datamodel_code_generator.util import camel_to_snake
+
+if TYPE_CHECKING:
+    from collections.abc import Iterable, Iterator, Mapping, Sequence
+
+    from datamodel_code_generator import DataclassArguments
+
+
+@runtime_checkable
+class HashableComparable(Hashable, Protocol):
+    """Protocol for types that are both hashable and support comparison."""
+
+    def __lt__(self, value: Any, /) -> bool: ...  # noqa: D105
+    def __le__(self, value: Any, /) -> bool: ...  # noqa: D105
+    def __gt__(self, value: Any, /) -> bool: ...  # noqa: D105
+    def __ge__(self, value: Any, /) -> bool: ...  # noqa: D105
+
+
+ModelName: TypeAlias = str
+ModelNames: TypeAlias = set[ModelName]
+ModelDeps: TypeAlias = dict[ModelName, set[ModelName]]
+OrderIndex: TypeAlias = dict[ModelName, int]
+
+ComponentId: TypeAlias = int
+Components: TypeAlias = list[list[ModelName]]
+ComponentOf: TypeAlias = dict[ModelName, ComponentId]
+ComponentEdges: TypeAlias = dict[ComponentId, set[ComponentId]]
+
+ClassNode: TypeAlias = tuple[ModelName, ...]
+ClassGraph: TypeAlias = dict[ClassNode, set[ClassNode]]
+
+
+class _KeepModelOrderDeps(NamedTuple):
+    strong: ModelDeps
+    all: ModelDeps
+
+
+class _KeepModelOrderComponents(NamedTuple):
+    components: Components
+    comp_of: ComponentOf
+
+
+def _collect_keep_model_order_deps(
+    model: DataModel,
+    *,
+    model_names: ModelNames,
+    imported: ModelNames,
+    use_deferred_annotations: bool,
+) -> tuple[set[ModelName], set[ModelName]]:
+    """Collect (strong_deps, all_deps) used by keep_model_order sorting.
+
+    - strong_deps: base class references (within-module, non-imported)
+    - all_deps: base class refs + (optionally) field refs (within-module, non-imported)
+    """
+    class_name = model.class_name
+    base_class_refs = {b.reference.short_name for b in model.base_classes if b.reference}
+    field_refs = {t.reference.short_name for f in model.fields for t in f.data_type.all_data_types if t.reference}
+
+    if use_deferred_annotations and not isinstance(model, (TypeAliasBase, pydantic_model_v2.RootModel)):
+        field_refs = set()
+
+    strong = {r for r in base_class_refs if r in model_names and r not in imported and r != class_name}
+    deps = {r for r in (base_class_refs | field_refs) if r in model_names and r not in imported and r != class_name}
+    return strong, deps
+
+
+def _build_keep_model_order_dependency_maps(
+    models: list[DataModel],
+    *,
+    model_names: ModelNames,
+    imported: ModelNames,
+    use_deferred_annotations: bool,
+) -> _KeepModelOrderDeps:
+    strong_deps: ModelDeps = {}
+    all_deps: ModelDeps = {}
+    for model in models:
+        strong, deps = _collect_keep_model_order_deps(
+            model,
+            model_names=model_names,
+            imported=imported,
+            use_deferred_annotations=use_deferred_annotations,
+        )
+        strong_deps[model.class_name] = strong
+        all_deps[model.class_name] = deps
+    return _KeepModelOrderDeps(strong=strong_deps, all=all_deps)
+
+
+def _build_keep_model_order_components(
+    all_deps: ModelDeps,
+    order_index: OrderIndex,
+) -> _KeepModelOrderComponents:
+    graph: ClassGraph = {(name,): {(dep,) for dep in deps} for name, deps in all_deps.items()}
+    sccs = strongly_connected_components(graph)
+    components: Components = [sorted((node[0] for node in scc), key=order_index.__getitem__) for scc in sccs]
+    components.sort(key=lambda members: min(order_index[n] for n in members))
+    comp_of: ComponentOf = {name: i for i, members in enumerate(components) for name in members}
+    return _KeepModelOrderComponents(components=components, comp_of=comp_of)
+
+
+def _build_keep_model_order_component_edges(
+    all_deps: ModelDeps,
+    comp_of: ComponentOf,
+    num_components: int,
+) -> ComponentEdges:
+    comp_edges: ComponentEdges = {i: set() for i in range(num_components)}
+    for name, deps in all_deps.items():
+        name_comp = comp_of[name]
+        for dep in deps:
+            if (dep_comp := comp_of[dep]) != name_comp:
+                comp_edges[dep_comp].add(name_comp)
+    return comp_edges
+
+
+def _build_keep_model_order_component_order(
+    components: Components,
+    comp_edges: ComponentEdges,
+    order_index: OrderIndex,
+) -> list[ComponentId]:
+    comp_key = [min(order_index[n] for n in members) for members in components]
+    return stable_toposort(
+        list(range(len(components))),
+        comp_edges,
+        key=lambda component_id: comp_key[component_id],
+    )
+
+
+def _build_keep_model_ordered_names(
+    ordered_comp_ids: list[ComponentId],
+    components: Components,
+    strong_deps: ModelDeps,
+    order_index: OrderIndex,
+) -> list[ModelName]:
+    ordered_names: list[ModelName] = []
+    for component_id in ordered_comp_ids:
+        members = components[component_id]
+        if len(members) > 1:
+            strong_edges: dict[ModelName, set[ModelName]] = {n: set() for n in members}
+            member_set = set(members)
+            for base in members:
+                derived_members = {member for member in members if base in strong_deps.get(member, set()) & member_set}
+                strong_edges[base].update(derived_members)
+            members = stable_toposort(members, strong_edges, key=order_index.__getitem__)
+        ordered_names.extend(members)
+    return ordered_names
+
+
+def _reorder_models_keep_model_order(
+    models: list[DataModel],
+    imports: Imports,
+    *,
+    use_deferred_annotations: bool,
+) -> None:
+    """Reorder models deterministically based on their dependencies.
+
+    Starts from class_name order and only moves models when required to satisfy dependencies.
+    Cycles are kept as SCC groups; within each SCC, base-class dependencies are prioritized.
+    """
+    models.sort(key=lambda x: x.class_name)
+    imported: ModelNames = {i for v in imports.values() for i in v}
+    model_by_name = {m.class_name: m for m in models}
+    model_names: ModelNames = set(model_by_name)
+    order_index: OrderIndex = {m.class_name: i for i, m in enumerate(models)}
+
+    deps = _build_keep_model_order_dependency_maps(
+        models,
+        model_names=model_names,
+        imported=imported,
+        use_deferred_annotations=use_deferred_annotations,
+    )
+    comps = _build_keep_model_order_components(deps.all, order_index)
+    comp_edges = _build_keep_model_order_component_edges(deps.all, comps.comp_of, len(comps.components))
+    ordered_comp_ids = _build_keep_model_order_component_order(comps.components, comp_edges, order_index)
+    ordered_names = _build_keep_model_ordered_names(ordered_comp_ids, comps.components, deps.strong, order_index)
+    models[:] = [model_by_name[name] for name in ordered_names]
+
+
+SPECIAL_PATH_FORMAT: str = "#-datamodel-code-generator-#-{}-#-special-#"
+
+
+def get_special_path(keyword: str, path: list[str]) -> list[str]:
+    """Create a special path marker for internal reference tracking."""
+    return [*path, SPECIAL_PATH_FORMAT.format(keyword)]
+
+
+escape_characters = str.maketrans({
+    "\u0000": r"\x00",  # Null byte
+    "\\": r"\\",
+    "'": r"\'",
+    "\b": r"\b",
+    "\f": r"\f",
+    "\n": r"\n",
+    "\r": r"\r",
+    "\t": r"\t",
+})
+
+
+def to_hashable(item: Any) -> HashableComparable:  # noqa: PLR0911
+    """Convert an item to a hashable and comparable representation.
+
+    Returns a value that is both hashable and supports comparison operators.
+    Used for caching and deduplication of models.
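+
+    Example (illustrative, editor's addition): equal content hashes equally,
+    regardless of dict key order or list element order:
+
+        >>> to_hashable({"b": [2, 1], "a": None}) == to_hashable({"a": None, "b": [1, 2]})
+        True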
+    """
+    if isinstance(
+        item,
+        (
+            list,
+            tuple,
+        ),
+    ):
+        try:
+            return tuple(sorted((to_hashable(i) for i in item), key=lambda v: (str(type(v)), v)))
+        except TypeError:
+            # Fallback when mixed, non-comparable types are present; preserve original order
+            return tuple(to_hashable(i) for i in item)
+    if isinstance(item, dict):
+        return tuple(
+            sorted(
+                (
+                    k,
+                    to_hashable(v),
+                )
+                for k, v in item.items()
+            )
+        )
+    if isinstance(item, set):  # pragma: no cover
+        return frozenset(to_hashable(i) for i in item)  # type: ignore[return-value]
+    if isinstance(item, BaseModel):
+        return to_hashable(item.dict())
+    if item is None:
+        return ""
+    return item  # type: ignore[return-value]
+
+
+def dump_templates(templates: list[DataModel]) -> str:
+    """Join model templates into a single code string."""
+    return "\n\n\n".join(str(m) for m in templates)
+
+
+def iter_models_field_data_types(
+    models: Iterable[DataModel],
+) -> Iterator[tuple[DataModel, DataModelFieldBase, DataType]]:
+    """Yield (model, field, data_type) for all models, fields, and nested data types."""
+    for model in models:
+        for field in model.fields:
+            for data_type in field.data_type.all_data_types:
+                yield model, field, data_type
+
+
+ReferenceMapSet = dict[str, set[str]]
+SortedDataModels = dict[str, DataModel]
+
+MAX_RECURSION_COUNT: int = sys.getrecursionlimit()
+
+
+def add_model_path_to_list(
+    paths: list[str] | None,
+    model: DataModel,
+    /,
+) -> list[str]:
+    """
+    Auxiliary function which adds the model's path to the list, provided the following hold:
+
+    - the model is not a type alias
+    - the path is not already in the list
+
+    """
+    if paths is None:
+        paths = []
+    if model.is_alias:
+        return paths
+    if (path := model.path) in paths:
+        return paths
+    paths.append(path)
+    return paths
+
+
+def sort_data_models(  # noqa: PLR0912, PLR0915
+    unsorted_data_models: list[DataModel],
+    sorted_data_models: SortedDataModels | None = None,
+    require_update_action_models: list[str] | None = None,
+    recursion_count: int = MAX_RECURSION_COUNT,
+) -> tuple[list[DataModel], SortedDataModels, list[str]]:
+    """Sort data models by dependency order for correct forward references."""
+    if sorted_data_models is None:
+        sorted_data_models = OrderedDict()
+    if require_update_action_models is None:
+        require_update_action_models = []
+    sorted_model_count: int = len(sorted_data_models)
+
+    unresolved_references: list[DataModel] = []
+    for model in unsorted_data_models:
+        if not model.reference_classes:
+            sorted_data_models[model.path] = model
+        elif model.path in model.reference_classes and len(model.reference_classes) == 1:  # only self-referencing
+            sorted_data_models[model.path] = model
+            add_model_path_to_list(require_update_action_models, model)
+        elif (
+            not model.reference_classes - {model.path} - set(sorted_data_models)
+        ):  # reference classes have been resolved
+            sorted_data_models[model.path] = model
+            if model.path in model.reference_classes:
+                add_model_path_to_list(require_update_action_models, model)
+        else:
+            unresolved_references.append(model)
+    if unresolved_references:
+        if sorted_model_count != len(sorted_data_models) and recursion_count:
+            try:
+                return sort_data_models(
+                    unresolved_references,
+                    sorted_data_models,
+                    require_update_action_models,
+                    recursion_count - 1,
+                )
+            except RecursionError:  # pragma: no cover
+                pass
+
+        # sort on base_class dependency
+        while True:
+            ordered_models: list[tuple[int, DataModel]] = []
+            unresolved_reference_model_names = [m.path for m in unresolved_references]
+            for model in unresolved_references:
+                if isinstance(model, pydantic_model_v2.RootModel):
+                    indexes = [
+                        unresolved_reference_model_names.index(ref_path)
+                        for f in model.fields
+                        for t in f.data_type.all_data_types
+                        if t.reference and (ref_path := t.reference.path) in unresolved_reference_model_names
+                    ]
+                else:
+                    indexes = [
+                        unresolved_reference_model_names.index(b.reference.path)
+                        for b in model.base_classes
+                        if b.reference and b.reference.path in unresolved_reference_model_names
+                    ]
+                if indexes:
+                    ordered_models.append((
+                        max(indexes),
+                        model,
+                    ))
+                else:
+                    ordered_models.append((
+                        -1,
+                        model,
+                    ))
+            sorted_unresolved_models = [m[1] for m in sorted(ordered_models, key=operator.itemgetter(0))]
+            if sorted_unresolved_models == unresolved_references:
+                break
+            unresolved_references = sorted_unresolved_models
+
+        # circular reference
+        unsorted_data_model_names = set(unresolved_reference_model_names)
+        for model in unresolved_references:
+            unresolved_model = model.reference_classes - {model.path} - set(sorted_data_models)
+            base_models = [getattr(s.reference, "path", None) for s in model.base_classes]
+            update_action_parent = set(require_update_action_models).intersection(base_models)
+            if not unresolved_model:
+                sorted_data_models[model.path] = model
+                if update_action_parent:
+                    add_model_path_to_list(require_update_action_models, model)
+                continue
+            if not unresolved_model - unsorted_data_model_names:
+                sorted_data_models[model.path] = model
+                add_model_path_to_list(require_update_action_models, model)
+                continue
+            # unresolved
+            unresolved_classes = ", ".join(
+                f"[class: {item.path} references: {item.reference_classes}]" for item in unresolved_references
+            )
+            msg = f"A Parser can not resolve classes: {unresolved_classes}."
+            raise Exception(msg)  # noqa: TRY002
+    return unresolved_references, sorted_data_models, require_update_action_models
+
+
+def relative(
+    current_module: str,
+    reference: str,
+    *,
+    reference_is_module: bool = False,
+    current_is_init: bool = False,
+) -> tuple[str, str]:
+    """Find relative module path.
+
+    Args:
+        current_module: Current module path (e.g., "foo.bar")
+        reference: Reference path (e.g., "foo.baz.ClassName" or "foo.baz" if reference_is_module)
+        reference_is_module: If True, treat reference as a module path (not module.class)
+        current_is_init: If True, treat current_module as a package __init__.py (adds depth)
+
+    Returns:
+        Tuple of (from_path, import_name) for constructing import statements
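+
+    Example (illustrative, editor's addition):
+        >>> relative("foo.bar", "foo.baz.ClassName")
+        ('.', 'baz')
+        >>> relative("foo.bar.baz", "foo.Animal")
+        ('..', 'Animal')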
+    """
+    if current_is_init:
+        current_module_path = [*current_module.split("."), "__init__"] if current_module else ["__init__"]
+    else:
+        current_module_path = current_module.split(".") if current_module else []
+
+    if reference_is_module:
+        reference_path = reference.split(".") if reference else []
+        name = reference_path[-1] if reference_path else ""
+    else:
+        *reference_path, name = reference.split(".")
+
+    if current_module_path == reference_path:
+        return "", ""
+
+    i = 0
+    for x, y in zip(current_module_path, reference_path):
+        if x != y:
+            break
+        i += 1
+
+    left = "." * (len(current_module_path) - i)
+    right = ".".join(reference_path[i:])
+
+    if not left:
+        left = "."
+    if not right:
+        right = name
+    elif "." in right:
+        extra, right = right.rsplit(".", 1)
+        left += extra
+
+    return left, right
+
+
+def is_ancestor_package_reference(current_module: str, reference: str) -> bool:
+    """Check if reference is in an ancestor package (__init__.py).
+
+    When the reference's module path is an ancestor (prefix) of the current module,
+    the reference is in an ancestor package's __init__.py file.
+
+    Args:
+        current_module: The current module path (e.g., "v0.mammal.canine")
+        reference: The full reference path (e.g., "v0.Animal")
+
+    Returns:
+        True if the reference is in an ancestor package, False otherwise.
+
+    Examples:
+        - current="v0.animal", ref="v0.Animal" -> True (immediate parent)
+        - current="v0.mammal.canine", ref="v0.Animal" -> True (grandparent)
+        - current="v0.animal", ref="v0.animal.Dog" -> False (same or child)
+        - current="pets", ref="Animal" -> True (root package is immediate parent)
+    """
+    current_path = current_module.split(".") if current_module else []
+    *reference_path, _ = reference.split(".")
+
+    if not current_path:
+        return False
+
+    # Case 1: Direct parent package (includes root package when reference_path is empty)
+    # e.g., current="pets", ref="Animal" -> current_path[:-1]=[] == reference_path=[]
+    if current_path[:-1] == reference_path:
+        return True
+
+    # Case 2: Deeper ancestor package (reference_path must be non-empty proper prefix)
+    # e.g., current="v0.mammal.canine", ref="v0.Animal" -> ["v0"] is prefix of ["v0","mammal","canine"]
+    return (
+        len(reference_path) > 0
+        and len(reference_path) < len(current_path)
+        and current_path[: len(reference_path)] == reference_path
+    )
+
+
+def exact_import(from_: str, import_: str, short_name: str) -> tuple[str, str]:
+    """Create exact import path to avoid relative import issues."""
+    if from_ == len(from_) * ".":
+        # Prevents "from . import foo" becoming "from ..foo import Foo"
+        # or "from .. import foo" becoming "from ...foo import Foo"
+        # when our imported module has the same parent
+        return f"{from_}{import_}", short_name
+    return f"{from_}.{import_}", short_name
+
+
+def get_module_directory(module: tuple[str, ...]) -> tuple[str, ...]:
+    """Get the directory portion of a module tuple.
+
+    Note: Module tuples in module_models do NOT include .py extension.
+    The last element is either the module name (e.g., "issuing") or empty for root.
+
+    Examples:
+        ("pkg",) -> ("pkg",) - root module
+        ("pkg", "issuing") -> ("pkg",) - submodule
+        ("foo", "bar", "baz") -> ("foo", "bar") - deeply nested module
+    """
+    if not module:
+        return ()
+    if len(module) == 1:
+        return module
+    return module[:-1]
+
+
+@runtime_checkable
+class Child(Protocol):
+    """Protocol for objects with a parent reference."""
+
+    @property
+    def parent(self) -> Any | None:
+        """Get the parent object reference."""
+        raise NotImplementedError
+
+
+T = TypeVar("T")
+
+
+def get_most_of_parent(value: Any, type_: type[T] | None = None) -> T | None:
+    """Traverse parent chain to find the outermost matching parent."""
+    if isinstance(value, Child) and (type_ is None or not isinstance(value, type_)):
+        return get_most_of_parent(value.parent, type_)
+    return value
+
+
+def title_to_class_name(title: str) -> str:
+    """Convert a schema title to a valid Python class name."""
+    classname = re.sub(r"[^A-Za-z0-9]+", " ", title)
+    return "".join(x for x in classname.title() if not x.isspace())
+
+
+def _find_base_classes(model: DataModel) -> list[DataModel]:
+    """Get direct base class DataModels."""
+    return [b.reference.source for b in model.base_classes if b.reference and isinstance(b.reference.source, DataModel)]
+
+
+def _find_field(original_name: str, models: list[DataModel]) -> DataModelFieldBase | None:
+    """Find a field by original_name in the models and their base classes."""
+    for model in models:
+        for field in model.iter_all_fields():  # pragma: no cover
+            if field.original_name == original_name:
+                return field
+    return None  # pragma: no cover
+
+
+def _copy_data_types(data_types: list[DataType]) -> list[DataType]:
+    """Deep copy a list of DataType objects, preserving references."""
+    copied_data_types: list[DataType] = []
+    for data_type_ in data_types:
+        if data_type_.reference:
+            copied_data_types.append(data_type_.__class__(reference=data_type_.reference))
+        elif data_type_.data_types:  # pragma: no cover
+            copied_data_type = data_type_.copy()
+            copied_data_type.data_types = _copy_data_types(data_type_.data_types)
+            copied_data_types.append(copied_data_type)
+        else:
+            copied_data_types.append(data_type_.copy())
+    return copied_data_types
+
+
+class Result(BaseModel):
+    """Generated code result with optional source file reference."""
+
+    body: str
+    future_imports: str = ""
+    source: Optional[Path] = None  # noqa: UP045
+
+
+class Source(BaseModel):
+    """Schema source file with path and content."""
+
+    path: Path
+    text: str
+
+    @classmethod
+    def from_path(cls, path: Path, base_path: Path, encoding: str) -> Source:
+        """Create a Source from a file path relative to base_path."""
+        return cls(
+            path=path.relative_to(base_path),
+            text=path.read_text(encoding=encoding),
+        )
+
+
+class Parser(ABC):
+    """Abstract base class for schema parsers.
+
+    Provides the parsing algorithm and code generation. Subclasses implement
+    parse_raw() to handle specific schema formats.
+    """
+
+    def __init__(  # noqa: PLR0913, PLR0915
+        self,
+        source: str | Path | list[Path] | ParseResult,
+        *,
+        data_model_type: type[DataModel] = pydantic_model.BaseModel,
+        data_model_root_type: type[DataModel] = pydantic_model.CustomRootType,
+        data_type_manager_type: type[DataTypeManager] = pydantic_model.DataTypeManager,
+        data_model_field_type: type[DataModelFieldBase] = pydantic_model.DataModelField,
+        base_class: str | None = None,
+        additional_imports: list[str] | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
+        target_python_version: PythonVersion = PythonVersionMin,
+        dump_resolve_reference_action: Callable[[Iterable[str]], str] | None = None,
+        validation: bool = False,
+        field_constraints: bool = False,
+        snake_case_field: bool = False,
+        strip_default_none: bool = False,
+        aliases: Mapping[str, str] | None = None,
+        allow_population_by_field_name: bool = False,
+        apply_default_values_for_required_fields: bool = False,
+        allow_extra_fields: bool = False,
+        extra_fields: str | None = None,
+        force_optional_for_required_fields: bool = False,
+        class_name: str | None = None,
+        use_standard_collections: bool = False,
+        base_path: Path | None = None,
+        use_schema_description: bool = False,
+        use_field_description: bool = False,
+        use_attribute_docstrings: bool = False,
+        use_inline_field_description: bool = False,
+        use_default_kwarg: bool = False,
+        reuse_model: bool = False,
+        reuse_scope: ReuseScope | None = None,
+        shared_module_name: str = DEFAULT_SHARED_MODULE_NAME,
+        encoding: str = "utf-8",
+        enum_field_as_literal: LiteralType | None = None,
+        set_default_enum_member: bool = False,
+        use_subclass_enum: bool = False,
+        use_specialized_enum: bool = True,
+        strict_nullable: bool = False,
+        use_generic_container_types: bool = False,
+        enable_faux_immutability: bool = False,
+        remote_text_cache: DefaultPutDict[str, str] | None = None,
+        disable_appending_item_suffix: bool = False,
+        strict_types: Sequence[StrictTypes] | None = None,
+        empty_enum_field_name: str | None = None,
+        custom_class_name_generator: Callable[[str], str] | None = title_to_class_name,
+        field_extra_keys: set[str] | None = None,
+        field_include_all_keys: bool = False,
+        field_extra_keys_without_x_prefix: set[str] | None = None,
+        wrap_string_literal: bool | None = None,
+        use_title_as_name: bool = False,
+        use_operation_id_as_name: bool = False,
+        use_unique_items_as_set: bool = False,
+        allof_merge_mode: AllOfMergeMode = AllOfMergeMode.Constraints,
+        http_headers: Sequence[tuple[str, str]] | None = None,
+        http_ignore_tls: bool = False,
+        use_annotated: bool = False,
+        use_serialize_as_any: bool = False,
+        use_non_positive_negative_number_constrained_types: bool = False,
+        use_decimal_for_multiple_of: bool = False,
+        original_field_name_delimiter: str | None = None,
+        use_double_quotes: bool = False,
+        use_union_operator: bool = False,
+        allow_responses_without_content: bool = False,
+        collapse_root_models: bool = False,
+        skip_root_model: bool = False,
+        use_type_alias: bool = False,
+        special_field_name_prefix: str | None = None,
+        remove_special_field_name_prefix: bool = False,
+        capitalise_enum_members: bool = False,
+        keep_model_order: bool = False,
+        use_one_literal_as_default: bool = False,
+        use_enum_values_in_discriminator: bool = False,
+        known_third_party: list[str] | None = None,
+        custom_formatters: list[str] | None = None,
+        custom_formatters_kwargs: dict[str, Any] | None = None,
+        use_pendulum: bool = False,
+        http_query_parameters: Sequence[tuple[str, str]] | None = None,
+        treat_dot_as_module: bool = False,
+        use_exact_imports: bool = False,
+        default_field_extras: dict[str, Any] | None = None,
+        target_datetime_class: DatetimeClassType | None = None,
+        keyword_only: bool = False,
+        frozen_dataclasses: bool = False,
+        no_alias: bool = False,
+        use_frozen_field: bool = False,
+        formatters: list[Formatter] = DEFAULT_FORMATTERS,
+        parent_scoped_naming: bool = False,
+        dataclass_arguments: DataclassArguments | None = None,
+        type_mappings: list[str] | None = None,
+        read_only_write_only_model_type: ReadOnlyWriteOnlyModelType | None = None,
+    ) -> None:
+        """Initialize the Parser with configuration options."""
+        self.keyword_only = keyword_only
+        self.frozen_dataclasses = frozen_dataclasses
+        self.data_type_manager: DataTypeManager = data_type_manager_type(
+            python_version=target_python_version,
+            use_standard_collections=use_standard_collections,
+            use_generic_container_types=use_generic_container_types,
+            use_non_positive_negative_number_constrained_types=use_non_positive_negative_number_constrained_types,
+            use_decimal_for_multiple_of=use_decimal_for_multiple_of,
+            strict_types=strict_types,
+            use_union_operator=use_union_operator,
+            use_pendulum=use_pendulum,
+            target_datetime_class=target_datetime_class,
+            treat_dot_as_module=treat_dot_as_module,
+            use_serialize_as_any=use_serialize_as_any,
+        )
+        self.data_model_type: type[DataModel] = data_model_type
+        self.data_model_root_type: type[DataModel] = data_model_root_type
+        self.data_model_field_type: type[DataModelFieldBase] = data_model_field_type
+
+        self.imports: Imports = Imports(use_exact_imports)
+        self.use_exact_imports: bool = use_exact_imports
+        self._append_additional_imports(additional_imports=additional_imports)
+
+        self.base_class: str | None = base_class
+        self.target_python_version: PythonVersion = target_python_version
+        self.results: list[DataModel] = []
+        self.dump_resolve_reference_action: Callable[[Iterable[str]], str] | None = dump_resolve_reference_action
+        self.validation: bool = validation
+        self.field_constraints: bool = field_constraints
+        self.snake_case_field: bool = snake_case_field
+        self.strip_default_none: bool = strip_default_none
+        self.apply_default_values_for_required_fields: bool = apply_default_values_for_required_fields
+        self.force_optional_for_required_fields: bool = force_optional_for_required_fields
+        self.use_schema_description: bool = use_schema_description
+        self.use_field_description: bool = use_field_description
+        self.use_inline_field_description: bool = use_inline_field_description
+        self.use_default_kwarg: bool = use_default_kwarg
+        self.reuse_model: bool = reuse_model
+        self.reuse_scope: ReuseScope | None = reuse_scope
+        self.shared_module_name: str = shared_module_name
+        self.encoding: str = encoding
+        self.enum_field_as_literal: LiteralType | None = enum_field_as_literal
+        self.set_default_enum_member: bool = set_default_enum_member
+        self.use_subclass_enum: bool = use_subclass_enum
+        self.use_specialized_enum: bool = use_specialized_enum
+        self.strict_nullable: bool = strict_nullable
+        self.use_generic_container_types: bool = use_generic_container_types
+        self.use_union_operator: bool = use_union_operator
+        self.enable_faux_immutability: bool = enable_faux_immutability
+        self.custom_class_name_generator: Callable[[str], str] | None = custom_class_name_generator
+        self.field_extra_keys: set[str] = field_extra_keys or set()
+        self.field_extra_keys_without_x_prefix: set[str] = field_extra_keys_without_x_prefix or set()
+        self.field_include_all_keys: bool = field_include_all_keys
+
+        self.remote_text_cache: DefaultPutDict[str, str] = remote_text_cache or DefaultPutDict()
+        self.current_source_path: Path | None = None
+        self.use_title_as_name: bool = use_title_as_name
+        self.use_operation_id_as_name: bool = use_operation_id_as_name
+        self.use_unique_items_as_set: bool = use_unique_items_as_set
+        self.allof_merge_mode: AllOfMergeMode = allof_merge_mode
+        self.dataclass_arguments = dataclass_arguments
+
+        if base_path:
+            self.base_path = base_path
+        elif isinstance(source, Path):
+            self.base_path = source.absolute() if source.is_dir() else source.absolute().parent
+        else:
+            self.base_path = Path.cwd()
+
+        self.source: str | Path | list[Path] | ParseResult = source
+        self.custom_template_dir = custom_template_dir
+        self.extra_template_data: defaultdict[str, Any] = extra_template_data or defaultdict(dict)
+
+        if allow_population_by_field_name:
+            self.extra_template_data[ALL_MODEL]["allow_population_by_field_name"] = True
+
+        if allow_extra_fields:
+            self.extra_template_data[ALL_MODEL]["allow_extra_fields"] = True
+
+        if extra_fields:
+            self.extra_template_data[ALL_MODEL]["extra_fields"] = extra_fields
+
+        if enable_faux_immutability:
+            self.extra_template_data[ALL_MODEL]["allow_mutation"] = False
+
+        if use_attribute_docstrings:
+            self.extra_template_data[ALL_MODEL]["use_attribute_docstrings"] = True
+
+        self.model_resolver = ModelResolver(
+            base_url=source.geturl() if isinstance(source, ParseResult) else None,
+            singular_name_suffix="" if disable_appending_item_suffix else None,
+            aliases=aliases,
+            empty_field_name=empty_enum_field_name,
+            snake_case_field=snake_case_field,
+            custom_class_name_generator=custom_class_name_generator,
+            base_path=self.base_path,
+            original_field_name_delimiter=original_field_name_delimiter,
+            special_field_name_prefix=special_field_name_prefix,
+            remove_special_field_name_prefix=remove_special_field_name_prefix,
+            capitalise_enum_members=capitalise_enum_members,
+            no_alias=no_alias,
+            parent_scoped_naming=parent_scoped_naming,
+            treat_dot_as_module=treat_dot_as_module,
+        )
+        self.class_name: str | None = class_name
+        self.wrap_string_literal: bool | None = wrap_string_literal
+        self.http_headers: Sequence[tuple[str, str]] | None = http_headers
+        self.http_query_parameters: Sequence[tuple[str, str]] | None = http_query_parameters
+        self.http_ignore_tls: bool = http_ignore_tls
+        self.use_annotated: bool = use_annotated
+        if self.use_annotated and not self.field_constraints:  # pragma: no cover
+            msg = "`use_annotated=True` has to be used with `field_constraints=True`"
+            raise Exception(msg)  # noqa: TRY002
+        self.use_serialize_as_any: bool = use_serialize_as_any
+        self.use_non_positive_negative_number_constrained_types = use_non_positive_negative_number_constrained_types
+        self.use_double_quotes = use_double_quotes
+        self.allow_responses_without_content = allow_responses_without_content
+        self.collapse_root_models = collapse_root_models
+        self.skip_root_model = skip_root_model
+        self.use_type_alias = use_type_alias
+        self.capitalise_enum_members = capitalise_enum_members
+        self.keep_model_order = keep_model_order
+        self.use_one_literal_as_default = use_one_literal_as_default
+        self.use_enum_values_in_discriminator = use_enum_values_in_discriminator
+        self.known_third_party = known_third_party
+        self.custom_formatter = custom_formatters
+        self.custom_formatters_kwargs = custom_formatters_kwargs
+        self.treat_dot_as_module = treat_dot_as_module
+        self.default_field_extras: dict[str, Any] | None = default_field_extras
+        self.formatters: list[Formatter] = formatters
+        self.type_mappings: dict[tuple[str, str], str] = Parser._parse_type_mappings(type_mappings)
+        self.read_only_write_only_model_type: ReadOnlyWriteOnlyModelType | None = read_only_write_only_model_type
+        self.use_frozen_field: bool = use_frozen_field
+
+    @property
+    def field_name_model_type(self) -> ModelType:
+        """Get the ModelType for field name validation based on data_model_type.
+
+        Returns ModelType.PYDANTIC for Pydantic models (which have reserved attributes
+        like 'schema', 'model_fields', etc.), and ModelType.CLASS for other model types
+        (TypedDict, dataclass, msgspec) which don't have such constraints.
+        """
+        if issubclass(
+            self.data_model_type,
+            (pydantic_model.BaseModel, pydantic_model_v2.BaseModel),
+        ):
+            return ModelType.PYDANTIC
+        return ModelType.CLASS
+
+    @staticmethod
+    def _parse_type_mappings(type_mappings: list[str] | None) -> dict[tuple[str, str], str]:
+        """Parse type mappings from CLI format to internal format.
+
+        Supports two formats:
+        - "type+format=target" (e.g., "string+binary=string")
+        - "format=target" (e.g., "binary=string", assumes type="string")
+
+        Returns a dict mapping (type, format) tuples to target type names.
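+
+        Example (illustrative, editor's addition):
+            >>> Parser._parse_type_mappings(["string+binary=bytes", "date-time=str"])
+            {('string', 'binary'): 'bytes', ('string', 'date-time'): 'str'}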
+        """
+        if not type_mappings:
+            return {}
+
+        result: dict[tuple[str, str], str] = {}
+        for mapping in type_mappings:
+            if "=" not in mapping:
+                msg = f"Invalid type mapping format: {mapping!r}. Expected 'type+format=target' or 'format=target'."
+                raise ValueError(msg)
+
+            source, target = mapping.split("=", 1)
+            if "+" in source:
+                type_, format_ = source.split("+", 1)
+            else:
+                # Default to "string" type if only format is specified
+                type_ = "string"
+                format_ = source
+
+            result[type_, format_] = target
+
+        return result
+
+    @property
+    def iter_source(self) -> Iterator[Source]:
+        """Iterate over all source files to be parsed."""
+        if isinstance(self.source, str):
+            yield Source(path=Path(), text=self.source)
+        elif isinstance(self.source, Path):  # pragma: no cover
+            if self.source.is_dir():
+                for path in sorted(self.source.rglob("*"), key=lambda p: p.name):
+                    if path.is_file():
+                        yield Source.from_path(path, self.base_path, self.encoding)
+            else:
+                yield Source.from_path(self.source, self.base_path, self.encoding)
+        elif isinstance(self.source, list):  # pragma: no cover
+            for path in self.source:
+                yield Source.from_path(path, self.base_path, self.encoding)
+        else:
+            yield Source(
+                path=Path(self.source.path),
+                text=self.remote_text_cache.get_or_put(self.source.geturl(), default_factory=self._get_text_from_url),
+            )
+
+    def _append_additional_imports(self, additional_imports: list[str] | None) -> None:
+        if additional_imports is None:
+            additional_imports = []
+
+        for additional_import_string in additional_imports:
+            if additional_import_string is None:  # pragma: no cover
+                continue
+            new_import = Import.from_full_path(additional_import_string)
+            self.imports.append(new_import)
+
+    def _get_text_from_url(self, url: str) -> str:
+        from datamodel_code_generator.http import get_body  # noqa: PLC0415
+
+        return self.remote_text_cache.get_or_put(
+            url,
+            default_factory=lambda _url: get_body(
+                url, self.http_headers, self.http_ignore_tls, self.http_query_parameters
+            ),
+        )
+
+    @classmethod
+    def get_url_path_parts(cls, url: ParseResult) -> list[str]:
+        """Split URL into scheme/host and path components."""
+        return [
+            f"{url.scheme}://{url.hostname}",
+            *url.path.split("/")[1:],
+        ]
+
+    @property
+    def data_type(self) -> type[DataType]:
+        """Get the DataType class from the type manager."""
+        return self.data_type_manager.data_type
+
+    @abstractmethod
+    def parse_raw(self) -> None:
+        """Parse the raw schema source. Must be implemented by subclasses."""
+        raise NotImplementedError
+
+    @classmethod
+    def _replace_model_in_list(
+        cls,
+        models: list[DataModel],
+        original: DataModel,
+        replacement: DataModel,
+    ) -> None:
+        """Replace model at its position in list."""
+        models.insert(models.index(original), replacement)
+        models.remove(original)
+
+    def __delete_duplicate_models(self, models: list[DataModel]) -> None:
+        model_class_names: dict[str, DataModel] = {}
+        model_to_duplicate_models: defaultdict[DataModel, list[DataModel]] = defaultdict(list)
+        for model in models.copy():
+            if isinstance(model, self.data_model_root_type):
+                root_data_type = model.fields[0].data_type
+
+                # Backward compatibility: remove the duplicated root model
+                if (
+                    root_data_type.reference
+                    and not root_data_type.is_dict
+                    and not root_data_type.is_list
+                    and root_data_type.reference.source in models
+                    and root_data_type.reference.name
+                    == self.model_resolver.get_class_name(model.reference.original_name, unique=False).name
+                ):
+                    model.reference.replace_children_references(root_data_type.reference)
+                    models.remove(model)
+                    for data_type in model.all_data_types:
+                        if data_type.reference:
+                            data_type.remove_reference()
+                    continue
+
+                # Remove self from all DataModel children's base_classes
+                for child in model.reference.iter_data_model_children():
+                    child.base_classes = [bc for bc in child.base_classes if bc.reference != model.reference]
+                    if not child.base_classes:  # pragma: no cover
+                        child.set_base_class()
+
+            class_name = model.duplicate_class_name or model.class_name
+            if class_name in model_class_names:
+                original_model = model_class_names[class_name]
+                if model.get_dedup_key(model.duplicate_class_name, use_default=False) == original_model.get_dedup_key(
+                    original_model.duplicate_class_name, use_default=False
+                ):
+                    model_to_duplicate_models[original_model].append(model)
+                    continue
+            model_class_names[class_name] = model
+        for model, duplicate_models in model_to_duplicate_models.items():
+            for duplicate_model in duplicate_models:
+                duplicate_model.reference.replace_children_references(model.reference)
+                # Deduplicate base_classes in all DataModel children
+                for child in duplicate_model.reference.iter_data_model_children():
+                    child.base_classes = list(
+                        {f"{c.module_name}.{c.type_hint}": c for c in child.base_classes}.values()
+                    )
+                models.remove(duplicate_model)
+
+    @classmethod
+    def __replace_duplicate_name_in_module(cls, models: list[DataModel]) -> None:
+        scoped_model_resolver = ModelResolver(
+            exclude_names={i.alias or i.import_ for m in models for i in m.imports},
+            duplicate_name_suffix="Model",
+        )
+
+        model_names: dict[str, DataModel] = {}
+        for model in models:
+            class_name: str = model.class_name
+            generated_name: str = scoped_model_resolver.add([model.path], class_name, unique=True, class_name=True).name
+            if class_name != generated_name:
+                model.class_name = generated_name
+            model_names[model.class_name] = model
+
+        for model in models:
+            duplicate_name = model.duplicate_class_name
+            # check only first desired name
+            if duplicate_name and duplicate_name not in model_names:
+                del model_names[model.class_name]
+                model.class_name = duplicate_name
+                model_names[duplicate_name] = model
+
+    def __change_from_import(  # noqa: PLR0913, PLR0914
+        self,
+        models: list[DataModel],
+        imports: Imports,
+        scoped_model_resolver: ModelResolver,
+        *,
+        init: bool,
+        internal_modules: set[tuple[str, ...]] | None = None,
+        model_path_to_module_name: dict[str, str] | None = None,
+    ) -> None:
+        model_paths = {model.path for model in models}
+        internal_modules = internal_modules or set()
+        model_path_to_module_name = model_path_to_module_name or {}
+
+        for model in models:
+            scoped_model_resolver.add([model.path], model.class_name)
+        for model in models:
+            before_import = model.imports
+            imports.append(before_import)
+            current_module_name = model_path_to_module_name.get(model.path, model.module_name)
+            for data_type in model.all_data_types:
+                if not data_type.reference or data_type.reference.path in model_paths:
+                    continue
+
+                ref_module_name = model_path_to_module_name.get(
+                    data_type.reference.path,
+                    data_type.full_name.rsplit(".", 1)[0] if "." in data_type.full_name else "",
+                )
+                target_full_name = (
+                    f"{ref_module_name}.{data_type.reference.short_name}"
+                    if ref_module_name
+                    else data_type.reference.short_name
+                )
+
+                if isinstance(data_type, BaseClassDataType):
+                    left, right = relative(current_module_name, target_full_name)
+                    is_ancestor = is_ancestor_package_reference(current_module_name, target_full_name)
+                    from_ = left if is_ancestor else (f"{left}{right}" if left.endswith(".") else f"{left}.{right}")
+                    import_ = data_type.reference.short_name
+                    full_path = from_, import_
+                else:
+                    from_, import_ = full_path = relative(current_module_name, target_full_name)
+                    if imports.use_exact:
+                        from_, import_ = exact_import(from_, import_, data_type.reference.short_name)
+                    import_ = import_.replace("-", "_")
+                    current_module_path = tuple(current_module_name.split(".")) if current_module_name else ()
+                    if (  # pragma: no cover
+                        len(current_module_path) > 1
+                        and current_module_path[-1].count(".") > 0
+                        and not self.treat_dot_as_module
+                    ):
+                        rel_path_depth = current_module_path[-1].count(".")
+                        from_ = from_[rel_path_depth:]
+
+                    ref_module = tuple(target_full_name.split(".")[:-1])
+
+                    is_module_class_collision = (
+                        ref_module and import_ == data_type.reference.short_name and ref_module[-1] == import_
+                    )
+
+                    if from_ and (ref_module in internal_modules or is_module_class_collision):
+                        from_ = f"{from_}{import_}" if from_.endswith(".") else f"{from_}.{import_}"
+                        import_ = data_type.reference.short_name
+                        full_path = from_, import_
+
+                alias = scoped_model_resolver.add(full_path, import_).name
+
+                name = data_type.reference.short_name
+                if from_ and import_ and alias != name:
+                    data_type.alias = alias if data_type.reference.short_name == import_ else f"{alias}.{name}"
+
+                if init and not target_full_name.startswith(current_module_name + "."):
+                    from_ = "." + from_
+                imports.append(
+                    Import(
+                        from_=from_,
+                        import_=import_,
+                        alias=alias,
+                        reference_path=data_type.reference.path,
+                    ),
+                )
+            after_import = model.imports
+            if before_import != after_import:
+                imports.append(after_import)
+
+    @classmethod
+    def __extract_inherited_enum(cls, models: list[DataModel]) -> None:
+        for model in models.copy():
+            if model.fields:
+                continue
+            enums: list[Enum] = []
+            for base_model in model.base_classes:
+                if not base_model.reference:
+                    continue
+                source_model = base_model.reference.source
+                if isinstance(source_model, Enum):
+                    enums.append(source_model)
+            if enums:
+                merged_enum = enums[0].__class__(
+                    fields=[f for e in enums for f in e.fields],
+                    description=model.description,
+                    reference=model.reference,
+                )
+                cls._replace_model_in_list(models, model, merged_enum)
+
+    def _create_discriminator_data_type(
+        self,
+        enum_source: Enum | None,
+        type_names: list[str],
+        discriminator_model: DataModel,
+        imports: Imports,
+    ) -> DataType:
+        """Create a data type for discriminator field, using enum literals if available."""
+        if enum_source:
+            enum_class_name = enum_source.reference.short_name
+            enum_member_literals: list[tuple[str, str]] = []
+            for value in type_names:
+                member = enum_source.find_member(value)
+                if member and member.field.name:
+                    enum_member_literals.append((enum_class_name, member.field.name))
+                else:  # pragma: no cover
+                    enum_member_literals.append((enum_class_name, value))
+            data_type = self.data_type(enum_member_literals=enum_member_literals)
+            if enum_source.module_path != discriminator_model.module_path:  # pragma: no cover
+                imports.append(Import.from_full_path(enum_source.name))
+        else:
+            data_type = self.data_type(literals=type_names)
+        return data_type
+
+    def __apply_discriminator_type(  # noqa: PLR0912, PLR0914, PLR0915
+        self,
+        models: list[DataModel],
+        imports: Imports,
+    ) -> None:
+        for model in models:  # noqa: PLR1702
+            for field in model.fields:
+                discriminator = field.extras.get("discriminator")
+                if not discriminator or not isinstance(discriminator, dict):
+                    continue
+                property_name = discriminator.get("propertyName")
+                if not property_name:  # pragma: no cover
+                    continue
+                field_name, alias = self.model_resolver.get_valid_field_name_and_alias(
+                    field_name=property_name, model_type=self.field_name_model_type
+                )
+                discriminator["propertyName"] = field_name
+                mapping = discriminator.get("mapping", {})
+                for data_type in field.data_type.data_types:
+                    if not data_type.reference:  # pragma: no cover
+                        continue
+                    discriminator_model = data_type.reference.source
+
+                    if not isinstance(  # pragma: no cover
+                        discriminator_model,
+                        (
+                            pydantic_model.BaseModel,
+                            pydantic_model_v2.BaseModel,
+                            dataclass_model.DataClass,
+                            msgspec_model.Struct,
+                        ),
+                    ):
+                        continue  # pragma: no cover
+
+                    type_names: list[str] = []
+
+                    def check_paths(
+                        model: pydantic_model.BaseModel | pydantic_model_v2.BaseModel | Reference,
+                        mapping: dict[str, str],
+                        type_names: list[str] = type_names,
+                    ) -> None:
+                        """Validate discriminator mapping paths for a model."""
+                        for name, path in mapping.items():
+                            if (model.path.split("#/")[-1] != path.split("#/")[-1]) and (
+                                path.startswith("#/") or model.path[:-1] != path.split("/")[-1]
+                            ):
+                                t_path = path[str(path).find("/") + 1 :]
+                                t_disc = model.path[: str(model.path).find("#")].lstrip("../")  # noqa: B005
+                                t_disc_2 = "/".join(t_disc.split("/")[1:])
+                                if t_path not in {t_disc, t_disc_2}:
+                                    continue
+                            type_names.append(name)
+
+                    # First try to get the discriminator value from the const field
+                    for discriminator_field in discriminator_model.fields:
+                        if field_name not in {discriminator_field.original_name, discriminator_field.name}:
+                            continue
+                        if discriminator_field.extras.get("const"):
+                            type_names = [discriminator_field.extras["const"]]
+                            break
+
+                    # If no const value found, try to get it from the mapping
+                    if not type_names:
+                        # Check the main discriminator model path
+                        if mapping:
+                            check_paths(discriminator_model, mapping)  # pyright: ignore[reportArgumentType]
+
+                            # Check the base_classes if they exist
+                            if len(type_names) == 0:
+                                for base_class in discriminator_model.base_classes:
+                                    check_paths(base_class.reference, mapping)  # pyright: ignore[reportArgumentType]
+                        else:
+                            for discriminator_field in discriminator_model.fields:
+                                if field_name not in {discriminator_field.original_name, discriminator_field.name}:
+                                    continue
+
+                                literals = discriminator_field.data_type.literals
+                                if literals and len(literals) == 1:  # pragma: no cover
+                                    type_names = [str(v) for v in literals]
+                                    break
+
+                                enum_source = discriminator_field.data_type.find_source(Enum)
+                                if enum_source and len(enum_source.fields) == 1:
+                                    first_field = enum_source.fields[0]
+                                    raw_default = first_field.default
+                                    if isinstance(raw_default, str):
+                                        type_names = [raw_default.strip("'\"")]
+                                    else:  # pragma: no cover
+                                        type_names = [str(raw_default)]
+                                    break
+
+                            if not type_names:
+                                type_names = [discriminator_model.path.split("/")[-1]]
+
+                    if not type_names:  # pragma: no cover
+                        msg = f"Discriminator type is not found. {data_type.reference.path}"
+                        raise RuntimeError(msg)
+
+                    enum_from_base: Enum | None = None
+                    if self.use_enum_values_in_discriminator:
+                        for base_class in discriminator_model.base_classes:
+                            if not base_class.reference or not base_class.reference.source:  # pragma: no cover
+                                continue
+                            base_model = base_class.reference.source
+                            if not isinstance(  # pragma: no cover
+                                base_model,
+                                (
+                                    pydantic_model.BaseModel,
+                                    pydantic_model_v2.BaseModel,
+                                    dataclass_model.DataClass,
+                                    msgspec_model.Struct,
+                                ),
+                            ):
+                                continue
+                            for base_field in base_model.fields:  # pragma: no branch
+                                if field_name not in {base_field.original_name, base_field.name}:  # pragma: no cover
+                                    continue
+                                enum_from_base = base_field.data_type.find_source(Enum)
+                                if enum_from_base:  # pragma: no branch
+                                    break
+                            if enum_from_base:  # pragma: no branch
+                                break
+
+                    has_one_literal = False
+                    for discriminator_field in discriminator_model.fields:
+                        if field_name not in {discriminator_field.original_name, discriminator_field.name}:
+                            continue
+                        literals = discriminator_field.data_type.literals
+                        const_value = discriminator_field.extras.get("const")
+                        expected_value = type_names[0] if type_names else None
+
+                        # Check if literals match (existing behavior)
+                        literals_match = len(literals) == 1 and literals[0] == expected_value
+                        # Check if const value matches (for msgspec with type: string + const)
+                        const_match = const_value is not None and const_value == expected_value
+
+                        if literals_match:
+                            has_one_literal = True
+                            if isinstance(discriminator_model, msgspec_model.Struct):  # pragma: no cover
+                                discriminator_model.add_base_class_kwarg("tag_field", f"'{field_name}'")
+                                discriminator_model.add_base_class_kwarg("tag", discriminator_field.represented_default)
+                                discriminator_field.extras["is_classvar"] = True
+                            # Found the discriminator field, no need to keep looking
+                            break
+
+                        # For msgspec with const value but no literal (type: string + const case)
+                        if const_match and isinstance(discriminator_model, msgspec_model.Struct):  # pragma: no cover
+                            has_one_literal = True
+                            discriminator_model.add_base_class_kwarg("tag_field", f"'{field_name}'")
+                            discriminator_model.add_base_class_kwarg("tag", repr(const_value))
+                            discriminator_field.extras["is_classvar"] = True
+                            break
+
+                        enum_source: Enum | None = None
+                        if self.use_enum_values_in_discriminator:
+                            enum_source = (  # pragma: no cover
+                                discriminator_field.data_type.find_source(Enum) or enum_from_base
+                            )
+
+                        for field_data_type in discriminator_field.data_type.all_data_types:
+                            if field_data_type.reference:  # pragma: no cover
+                                field_data_type.remove_reference()
+
+                        discriminator_field.data_type = self._create_discriminator_data_type(
+                            enum_source, type_names, discriminator_model, imports
+                        )
+                        discriminator_field.data_type.parent = discriminator_field
+                        discriminator_field.required = True
+                        imports.append(discriminator_field.imports)
+                        has_one_literal = True
+                    if not has_one_literal:
+                        new_data_type = self._create_discriminator_data_type(
+                            enum_from_base, type_names, discriminator_model, imports
+                        )
+                        discriminator_model.fields.append(
+                            self.data_model_field_type(
+                                name=field_name,
+                                data_type=new_data_type,
+                                required=True,
+                                alias=alias,
+                            )
+                        )
+            has_imported_literal = any(import_ == IMPORT_LITERAL for import_ in imports)
+            if has_imported_literal:  # pragma: no cover
+                imports.append(IMPORT_LITERAL)
+
+    @classmethod
+    def _create_set_from_list(cls, data_type: DataType) -> DataType | None:
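+        # Illustrative conversion (hypothetical types): a uniqueItems list typed
+        # List[str] becomes Set[str]; nested shapes such as Optional[List[str]]
+        # are rewritten recursively via swap_with().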
+        if data_type.is_list:
+            new_data_type = data_type.copy()
+            new_data_type.is_list = False
+            new_data_type.is_set = True
+            for data_type_ in new_data_type.data_types:
+                data_type_.parent = new_data_type
+            return new_data_type
+        if data_type.data_types:  # pragma: no cover
+            for nested_data_type in data_type.data_types[:]:
+                set_data_type = cls._create_set_from_list(nested_data_type)
+                if set_data_type:  # pragma: no cover
+                    nested_data_type.swap_with(set_data_type)
+            return data_type
+        return None  # pragma: no cover
+
+    def __replace_unique_list_to_set(self, models: list[DataModel]) -> None:
+        if not self.use_unique_items_as_set:
+            return
+        for model in models:
+            for model_field in model.fields:
+                if not (model_field.constraints and model_field.constraints.unique_items):
+                    continue
+                set_data_type = self._create_set_from_list(model_field.data_type)
+                if set_data_type:  # pragma: no cover
+                    # Check if default list elements are hashable before converting type
+                    if isinstance(model_field.default, list):
+                        try:
+                            converted_default = set(model_field.default)
+                        except TypeError:
+                            # Elements are not hashable (e.g., contains dicts)
+                            # Skip both type and default conversion to keep consistency
+                            continue
+                        model_field.default = converted_default
+                    model_field.replace_data_type(set_data_type)
+
+    @classmethod
+    def __set_reference_default_value_to_field(cls, models: list[DataModel]) -> None:
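+        # Hedged example: a field typed as a reference to a model that declares its
+        # own default inherits that default when the field itself declares none.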
+        for model in models:
+            for model_field in model.fields:
+                if not model_field.data_type.reference or model_field.has_default:
+                    continue
+                if (  # pragma: no cover
+                    isinstance(model_field.data_type.reference.source, DataModel)
+                    and model_field.data_type.reference.source.default != UNDEFINED
+                ):
+                    model_field.default = model_field.data_type.reference.source.default
+
+    def __reuse_model(self, models: list[DataModel], require_update_action_models: list[str]) -> None:
+        if not self.reuse_model or self.reuse_scope == ReuseScope.Tree:
+            return
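+        # Hedged example: when two models share a dedup key (i.e. they are
+        # structurally identical), the later one is replaced by a reuse model
+        # derived from the first; duplicate enums are removed and their
+        # references repointed instead.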
+        model_cache: dict[tuple[HashableComparable, ...], Reference] = {}
+        duplicates = []
+        for model in models.copy():
+            model_key = model.get_dedup_key()
+            cached_model_reference = model_cache.get(model_key)
+            if cached_model_reference:
+                if isinstance(model, Enum):
+                    model.replace_children_in_models(models, cached_model_reference)
+                    duplicates.append(model)
+                else:
+                    inherited_model = model.create_reuse_model(cached_model_reference)
+                    if cached_model_reference.path in require_update_action_models:
+                        add_model_path_to_list(require_update_action_models, inherited_model)
+                    self._replace_model_in_list(models, model, inherited_model)
+            else:
+                model_cache[model_key] = model.reference
+
+        for duplicate in duplicates:
+            models.remove(duplicate)
+
+    def __find_duplicate_models_across_modules(  # noqa: PLR6301
+        self,
+        module_models: list[tuple[tuple[str, ...], list[DataModel]]],
+    ) -> list[tuple[tuple[str, ...], DataModel, tuple[str, ...], DataModel]]:
+        """Find duplicate models across all modules by comparing render output and imports."""
+        all_models: list[tuple[tuple[str, ...], DataModel]] = []
+        for module, models in module_models:
+            all_models.extend((module, model) for model in models)
+
+        model_cache: dict[tuple[HashableComparable, ...], tuple[tuple[str, ...], DataModel]] = {}
+        duplicates: list[tuple[tuple[str, ...], DataModel, tuple[str, ...], DataModel]] = []
+
+        for module, model in all_models:
+            model_key = model.get_dedup_key()
+            cached = model_cache.get(model_key)
+            if cached:
+                canonical_module, canonical_model = cached
+                duplicates.append((module, model, canonical_module, canonical_model))
+            else:
+                model_cache[model_key] = (module, model)
+
+        return duplicates
+
+    def __validate_shared_module_name(
+        self,
+        module_models: list[tuple[tuple[str, ...], list[DataModel]]],
+    ) -> None:
+        """Validate that the shared module name doesn't conflict with existing modules."""
+        shared_module = self.shared_module_name
+        existing_module_names = {module[0] for module, _ in module_models}
+        if shared_module in existing_module_names:
+            msg = (
+                f"Schema file or directory '{shared_module}' conflicts with the shared module name. "
+                f"Use --shared-module-name to specify a different name."
+            )
+            raise Error(msg)
+
+    def __create_shared_module_from_duplicates(  # noqa: PLR0912
+        self,
+        module_models: list[tuple[tuple[str, ...], list[DataModel]]],
+        duplicates: list[tuple[tuple[str, ...], DataModel, tuple[str, ...], DataModel]],
+        require_update_action_models: list[str],
+    ) -> tuple[tuple[str, ...], list[DataModel]]:
+        """Create shared module with canonical models and replace duplicates with inherited models."""
+        shared_module = self.shared_module_name
+
+        shared_models: list[DataModel] = []
+        canonical_to_shared_ref: dict[DataModel, Reference] = {}
+        canonical_models_seen: set[DataModel] = set()
+
+        # Process in order of first appearance in duplicates to ensure stable ordering
+        for _, _, _, canonical in duplicates:
+            if canonical in canonical_models_seen:
+                continue
+            canonical_models_seen.add(canonical)
+            canonical.file_path = Path(f"{shared_module}.py")
+            canonical_to_shared_ref[canonical] = canonical.reference
+            shared_models.append(canonical)
+
+        supports_inheritance = issubclass(
+            self.data_model_type,
+            (
+                pydantic_model.BaseModel,
+                pydantic_model_v2.BaseModel,
+                dataclass_model.DataClass,
+            ),
+        )
+
+        for duplicate_module, duplicate_model, _, canonical_model in duplicates:
+            shared_ref = canonical_to_shared_ref[canonical_model]
+            for module, models in module_models:
+                if module != duplicate_module or duplicate_model not in models:
+                    continue
+                if isinstance(duplicate_model, Enum) or not supports_inheritance:
+                    duplicate_model.replace_children_in_models(models, shared_ref)
+                    models.remove(duplicate_model)
+                else:
+                    inherited_model = duplicate_model.create_reuse_model(shared_ref)
+                    if shared_ref.path in require_update_action_models:
+                        add_model_path_to_list(require_update_action_models, inherited_model)
+                    self._replace_model_in_list(models, duplicate_model, inherited_model)
+                break
+            else:  # pragma: no cover
+                msg = f"Duplicate model {duplicate_model.name} not found in module {duplicate_module}"
+                raise RuntimeError(msg)
+
+        for canonical in canonical_models_seen:
+            for _module, models in module_models:
+                if canonical in models:
+                    models.remove(canonical)
+                    break
+            else:  # pragma: no cover
+                msg = f"Canonical model {canonical.name} not found in any module"
+                raise RuntimeError(msg)
+
+        return (shared_module,), shared_models
+
+    def __reuse_model_tree_scope(
+        self,
+        module_models: list[tuple[tuple[str, ...], list[DataModel]]],
+        require_update_action_models: list[str],
+    ) -> tuple[tuple[str, ...], list[DataModel]] | None:
+        """Deduplicate models across all modules, placing shared models in shared.py."""
+        if not self.reuse_model or self.reuse_scope != ReuseScope.Tree:
+            return None
+
+        duplicates = self.__find_duplicate_models_across_modules(module_models)
+        if not duplicates:
+            return None
+
+        self.__validate_shared_module_name(module_models)
+        return self.__create_shared_module_from_duplicates(module_models, duplicates, require_update_action_models)
+
+    def __collapse_root_models(  # noqa: PLR0912
+        self,
+        models: list[DataModel],
+        unused_models: list[DataModel],
+        imports: Imports,
+        scoped_model_resolver: ModelResolver,
+    ) -> None:
+        if not self.collapse_root_models:
+            return
+
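+        # Illustrative collapse (hypothetical models): given a root model UserId
+        # wrapping a plain int, a field declared as `id: UserId` is rewritten to
+        # `id: int`, and UserId is queued in unused_models once nothing references
+        # it anymore.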
+        for model in models:  # noqa: PLR1702
+            for model_field in model.fields:
+                for data_type in model_field.data_type.all_data_types:
+                    reference = data_type.reference
+                    if not reference or not isinstance(reference.source, self.data_model_root_type):
+                        # If the data type is not a reference, we can't collapse it.
+                        # If it references anything other than a root model type, we leave it as-is.
+                        continue
+
+                    # Use root-type as model_field type
+                    root_type_model = reference.source
+                    root_type_field = root_type_model.fields[0]
+
+                    if (
+                        self.field_constraints
+                        and isinstance(root_type_field.constraints, ConstraintsBase)
+                        and root_type_field.constraints.has_constraints
+                        and any(d for d in model_field.data_type.all_data_types if d.is_dict or d.is_union or d.is_list)
+                    ):
+                        continue  # pragma: no cover
+
+                    if root_type_field.data_type.reference:
+                        # If the root type field is a reference, we aren't able to collapse it yet.
+                        continue
+
+                    # set copied data_type
+                    copied_data_type = root_type_field.data_type.copy()
+                    if isinstance(data_type.parent, self.data_model_field_type):
+                        # for field
+                        # override empty field by root-type field
+                        model_field.extras = {
+                            **root_type_field.extras,
+                            **model_field.extras,
+                        }
+                        model_field.process_const()
+
+                        if self.field_constraints:
+                            model_field.constraints = ConstraintsBase.merge_constraints(
+                                root_type_field.constraints, model_field.constraints
+                            )
+
+                        data_type.parent.data_type = copied_data_type
+
+                    elif isinstance(data_type.parent, DataType) and data_type.parent.is_list:
+                        if self.field_constraints:
+                            model_field.constraints = ConstraintsBase.merge_constraints(
+                                root_type_field.constraints, model_field.constraints
+                            )
+                        if (  # pragma: no cover
+                            isinstance(
+                                root_type_field,
+                                pydantic_model.DataModelField,
+                            )
+                            and not model_field.extras.get("discriminator")
+                            and not any(t.is_list for t in model_field.data_type.data_types)
+                        ):
+                            discriminator = root_type_field.extras.get("discriminator")
+                            if discriminator:
+                                model_field.extras["discriminator"] = discriminator
+                        assert isinstance(data_type.parent, DataType)
+                        data_type.parent.data_types.remove(data_type)  # pragma: no cover
+                        data_type.parent.data_types.append(copied_data_type)
+
+                    elif isinstance(data_type.parent, DataType):
+                        # for data_type
+                        data_type_id = id(data_type)
+                        data_type.parent.data_types = [
+                            d for d in (*data_type.parent.data_types, copied_data_type) if id(d) != data_type_id
+                        ]
+                    else:  # pragma: no cover
+                        continue
+
+                    for d in copied_data_type.all_data_types:
+                        if d.reference is None:
+                            continue
+                        from_, import_ = full_path = relative(model.module_name, d.full_name)
+                        if from_ and import_:
+                            alias = scoped_model_resolver.add(full_path, import_)
+                            d.alias = (
+                                alias.name
+                                if d.reference.short_name == import_
+                                else f"{alias.name}.{d.reference.short_name}"
+                            )
+                            imports.append([
+                                Import(
+                                    from_=from_,
+                                    import_=import_,
+                                    alias=alias.name,
+                                    reference_path=d.reference.path,
+                                )
+                            ])
+
+                    original_field = get_most_of_parent(data_type, DataModelFieldBase)
+                    if original_field:  # pragma: no cover
+                        # TODO: Improve detection of reference type
+                        # Use list instead of set because Import is not hashable
+                        excluded_imports = [IMPORT_OPTIONAL, IMPORT_UNION]
+                        field_imports = [i for i in original_field.imports if i not in excluded_imports]
+                        imports.append(field_imports)
+
+                    data_type.remove_reference()
+
+                    assert isinstance(root_type_model, DataModel)
+                    root_type_model.reference.children = [
+                        c for c in root_type_model.reference.children if getattr(c, "parent", None)
+                    ]
+
+                    imports.remove_referenced_imports(root_type_model.path)
+                    if not root_type_model.reference.children:
+                        unused_models.append(root_type_model)
+
+    def __set_default_enum_member(
+        self,
+        models: list[DataModel],
+    ) -> None:
+        if not self.set_default_enum_member:
+            return
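+        # Hedged example: a default of "red" on a field referencing an enum Color is
+        # rewritten to the member Color.red; list defaults are mapped element-wise.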
+        for _, model_field, data_type in iter_models_field_data_types(models):
+            if not model_field.default:
+                continue
+            if data_type.reference and isinstance(data_type.reference.source, Enum):  # pragma: no cover
+                if isinstance(model_field.default, list):
+                    enum_member: list[Member] | Member | None = [
+                        e for e in (data_type.reference.source.find_member(d) for d in model_field.default) if e
+                    ]
+                else:
+                    enum_member = data_type.reference.source.find_member(model_field.default)
+                if not enum_member:
+                    continue
+                model_field.default = enum_member
+                if data_type.alias:
+                    if isinstance(enum_member, list):
+                        for enum_member_ in enum_member:
+                            enum_member_.alias = data_type.alias
+                    else:
+                        enum_member.alias = data_type.alias
+
+    def __wrap_root_model_default_values(
+        self,
+        models: list[DataModel],
+    ) -> None:
+        """Wrap RootModel reference default values with their type constructors."""
+        if not self.use_annotated:
+            return
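+        # Hypothetical sketch: a field typed as RootModel subclass UserId with a
+        # plain default of 5 is wrapped so that it renders as UserId(5).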
+        for model, model_field, data_type in iter_models_field_data_types(models):
+            if isinstance(model, (Enum, self.data_model_root_type)):
+                continue
+            if model_field.default is None:
+                continue
+            if isinstance(model_field.default, (WrappedDefault, Member)):
+                continue
+            if isinstance(model_field.default, list):
+                continue
+            if data_type.reference and isinstance(data_type.reference.source, pydantic_model_v2.RootModel):
+                # Use alias if available (handles import collisions)
+                type_name = data_type.alias or data_type.reference.short_name
+                model_field.default = WrappedDefault(
+                    value=model_field.default,
+                    type_name=type_name,
+                )
+
+    def __override_required_field(
+        self,
+        models: list[DataModel],
+    ) -> None:
+        for model in models:
+            if isinstance(model, (Enum, self.data_model_root_type)):
+                continue
+            for index, model_field in enumerate(model.fields[:]):
+                data_type = model_field.data_type
+                if (
+                    not model_field.original_name  # noqa: PLR0916
+                    or data_type.data_types
+                    or data_type.reference
+                    or data_type.type
+                    or data_type.literals
+                    or data_type.dict_key
+                ):
+                    continue
+
+                original_field = _find_field(model_field.original_name, _find_base_classes(model))
+                if not original_field:  # pragma: no cover
+                    model.fields.remove(model_field)
+                    continue
+                copied_original_field = original_field.copy()
+                if original_field.data_type.reference:
+                    data_type = self.data_type_manager.data_type(
+                        reference=original_field.data_type.reference,
+                    )
+                elif original_field.data_type.data_types:
+                    data_type = original_field.data_type.copy()
+                    data_type.data_types = _copy_data_types(original_field.data_type.data_types)
+                    for data_type_ in data_type.data_types:
+                        data_type_.parent = data_type
+                else:
+                    data_type = original_field.data_type.copy()
+                data_type.parent = copied_original_field
+                copied_original_field.data_type = data_type
+                copied_original_field.parent = model
+                copied_original_field.required = True
+                model.fields.insert(index, copied_original_field)
+                model.fields.remove(model_field)
+
+    def __sort_models(
+        self,
+        models: list[DataModel],
+        imports: Imports,
+        *,
+        use_deferred_annotations: bool,
+    ) -> None:
+        if not self.keep_model_order:
+            return
+
+        _reorder_models_keep_model_order(models, imports, use_deferred_annotations=use_deferred_annotations)
+
+    def __change_field_name(
+        self,
+        models: list[DataModel],
+    ) -> None:
+        if not issubclass(self.data_model_type, pydantic_model_v2.BaseModel):
+            return
+        for model in models:
+            if "Enum" in model.base_class:
+                continue
+
+            for field in model.fields:
+                field_name = field.name
+                field_name_resolver = ModelResolver(snake_case_field=self.snake_case_field, remove_suffix_number=True)
+                for data_type in field.data_type.all_data_types:
+                    if data_type.reference:
+                        field_name_resolver.exclude_names.add(data_type.reference.short_name)
+                new_field_name = field_name_resolver.add(["field"], cast("str", field_name)).name
+                if field_name != new_field_name:
+                    field.alias = field_name
+                    field.name = new_field_name
+
+    def __set_one_literal_on_default(self, models: list[DataModel]) -> None:
+        if not self.use_one_literal_as_default:
+            return
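+        # e.g. a required field typed Literal["user"] becomes optional with the
+        # default "user".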
+        for model in models:
+            for model_field in model.fields:
+                if not model_field.required or len(model_field.data_type.literals) != 1:
+                    continue
+                model_field.default = model_field.data_type.literals[0]
+                model_field.required = False
+                if model_field.nullable is not True:  # pragma: no cover
+                    model_field.nullable = False
+
+    def __fix_dataclass_field_ordering(self, models: list[DataModel]) -> None:
+        """Fix field ordering for dataclasses with inheritance after defaults are set."""
+        for model in models:
+            if (inherited := self.__get_dataclass_inherited_info(model)) is None:
+                continue
+            inherited_names, has_default = inherited
+            if not has_default or not any(self.__is_new_required_field(f, inherited_names) for f in model.fields):
+                continue
+
+            if self.target_python_version.has_kw_only_dataclass:
+                for field in model.fields:
+                    if self.__is_new_required_field(field, inherited_names):
+                        field.extras["kw_only"] = True
+            else:
+                warn(
+                    f"Dataclass '{model.class_name}' has a field ordering conflict due to inheritance. "
+                    f"An inherited field has a default value, but new required fields are added. "
+                    f"This will cause a TypeError at runtime. Consider using --target-python-version 3.10 "
+                    f"or higher to enable automatic field(kw_only=True) fix.",
+                    category=UserWarning,
+                    stacklevel=2,
+                )
+            model.fields = sorted(model.fields, key=dataclass_model.has_field_assignment)
+
+    @classmethod
+    def __get_dataclass_inherited_info(cls, model: DataModel) -> tuple[set[str], bool] | None:
+        """Get inherited field names and whether any has default. Returns None if not applicable."""
+        if not isinstance(model, dataclass_model.DataClass):
+            return None
+        if not model.base_classes or model.dataclass_arguments.get("kw_only"):
+            return None
+
+        inherited_names: set[str] = set()
+        has_default = False
+        for base in model.base_classes:
+            if not base.reference or not isinstance(base.reference.source, DataModel):
+                continue  # pragma: no cover
+            for f in base.reference.source.iter_all_fields():
+                if not f.name or f.extras.get("init") is False:
+                    continue  # pragma: no cover
+                inherited_names.add(f.name)
+                if dataclass_model.has_field_assignment(f):
+                    has_default = True
+
+        for f in model.fields:
+            if f.name not in inherited_names or f.extras.get("init") is False:
+                continue
+            if dataclass_model.has_field_assignment(f):  # pragma: no branch
+                has_default = True
+        return (inherited_names, has_default) if inherited_names else None
+
+    def __is_new_required_field(self, field: DataModelFieldBase, inherited: set[str]) -> bool:  # noqa: PLR6301
+        """Check if field is a new required init field."""
+        return (
+            field.name not in inherited
+            and field.extras.get("init") is not False
+            and not dataclass_model.has_field_assignment(field)
+        )
+
+    @classmethod
+    def __update_type_aliases(cls, models: list[DataModel]) -> None:
+        """Update type aliases to properly handle forward references per PEP 484."""
+        model_index: dict[str, int] = {m.class_name: i for i, m in enumerate(models)}
+
+        for i, model in enumerate(models):
+            if not isinstance(model, TypeAliasBase):
+                continue
+            if isinstance(model, TypeStatement):
+                continue
+
+            for field in model.fields:
+                for data_type in field.data_type.all_data_types:
+                    if not data_type.reference:
+                        continue
+                    source = data_type.reference.source
+                    if not isinstance(source, DataModel):
+                        continue  # pragma: no cover
+                    if isinstance(source, TypeStatement):
+                        continue  # pragma: no cover
+                    if source.module_path != model.module_path:
+                        continue
+                    name = data_type.reference.short_name
+                    source_index = model_index.get(name)
+                    if source_index is not None and source_index >= i:
+                        data_type.alias = f'"{name}"'
+
+    @classmethod
+    def __postprocess_result_modules(cls, results: dict[tuple[str, ...], Result]) -> dict[tuple[str, ...], Result]:
+        def process(input_tuple: tuple[str, ...]) -> tuple[str, ...]:
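+            # e.g. (hypothetical key) ("a.b", "models.py") -> ("a", "b", "models.py"):
+            # dotted segments expand into nested directories, then the last two parts
+            # are re-joined so the file name keeps its extension.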
+            r = []
+            for item in input_tuple:
+                p = item.split(".")
+                if len(p) > 1:
+                    r.extend(p[:-1])
+                    r.append(p[-1])
+                else:
+                    r.append(item)
+
+            if len(r) >= 2:  # noqa: PLR2004
+                r = [*r[:-2], f"{r[-2]}.{r[-1]}"]
+            return tuple(r)
+
+        results = {process(k): v for k, v in results.items()}
+
+        init_result = next(v for k, v in results.items() if k[-1] == "__init__.py")
+        folders = {t[:-1] if t[-1].endswith(".py") else t for t in results}
+        for folder in folders:
+            for i in range(len(folder)):
+                subfolder = folder[: i + 1]
+                init_file = (*subfolder, "__init__.py")
+                results.update({init_file: init_result})
+        return results
+
+    def __change_imported_model_name(  # noqa: PLR6301
+        self,
+        models: list[DataModel],
+        imports: Imports,
+        scoped_model_resolver: ModelResolver,
+    ) -> None:
+        imported_names = {
+            imports.alias[from_][i] if i in imports.alias[from_] and i != imports.alias[from_][i] else i
+            for from_, import_ in imports.items()
+            for i in import_
+        }
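+        # Hedged example: a local model whose class name matches an imported name
+        # (say an imported "Pet" next to a generated model "Pet") is renamed via the
+        # scoped resolver so it no longer shadows the import.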
+        for model in models:
+            if model.class_name not in imported_names:  # pragma: no cover
+                continue
+
+            model.reference.name = scoped_model_resolver.add(  # pragma: no cover
+                path=get_special_path("imported_name", model.path.split("/")),
+                original_name=model.reference.name,
+                unique=True,
+                class_name=True,
+            ).name
+
+    def __alias_shadowed_imports(  # noqa: PLR6301
+        self,
+        models: list[DataModel],
+        all_model_field_names: set[str],
+    ) -> None:
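+        # e.g. a field named "datetime" whose type is also "datetime" would shadow
+        # the import, so the type becomes "datetime_aliased" and the import is
+        # re-emitted with that alias.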
+        for _, model_field, data_type in iter_models_field_data_types(models):
+            if data_type and data_type.type in all_model_field_names and data_type.type == model_field.name:
+                alias = data_type.type + "_aliased"
+                data_type.type = alias
+                if data_type.import_:  # pragma: no cover
+                    data_type.import_ = Import(
+                        from_=data_type.import_.from_,
+                        import_=data_type.import_.import_,
+                        alias=alias,
+                        reference_path=data_type.import_.reference_path,
+                    )
+
+    @classmethod
+    def _collect_exports_for_init(
+        cls,
+        module: tuple[str, ...],
+        processed_models: Sequence[
+            tuple[tuple[str, ...], tuple[str, ...], Sequence[DataModel], bool, Imports, ModelResolver]
+        ],
+        scope: AllExportsScope,
+    ) -> list[tuple[str, tuple[str, ...], str]]:
+        """Collect exports for __init__.py based on scope."""
+        exports: list[tuple[str, tuple[str, ...], str]] = []
+        base = module[:-1] if module[-1] == "__init__.py" else module
+        base_len = len(base)
+
+        for proc_module, _, proc_models, _, _, _ in processed_models:
+            if not proc_models or proc_module == module:
+                continue
+            last = proc_module[-1]
+            prefix = proc_module[:-1] if last == "__init__.py" else (*proc_module[:-1], last[:-3])
+            if prefix[:base_len] != base or (depth := len(prefix) - base_len) < 1:
+                continue
+            if scope == AllExportsScope.Children and depth != 1:
+                continue
+            rel = prefix[base_len:]
+            exports.extend(
+                (ref.short_name, rel, ".".join(rel))
+                for m in proc_models
+                if (ref := m.reference) and not ref.short_name.startswith("_")
+            )
+        return exports
+
+    @classmethod
+    def _resolve_export_collisions(
+        cls,
+        exports: list[tuple[str, tuple[str, ...], str]],
+        strategy: AllExportsCollisionStrategy | None,
+        reserved: set[str] | None = None,
+    ) -> dict[str, list[tuple[str, tuple[str, ...], str]]]:
+        """Resolve name collisions in exports based on strategy."""
+        reserved = reserved or set()
+        by_name: dict[str, list[tuple[str, tuple[str, ...], str]]] = {}
+        for item in exports:
+            by_name.setdefault(item[0], []).append(item)
+
+        if not (colliding := {n for n, items in by_name.items() if len(items) > 1 or n in reserved}):
+            return dict(by_name)
+        if (effective := strategy or AllExportsCollisionStrategy.Error) == AllExportsCollisionStrategy.Error:
+            cls._raise_collision_error(by_name, colliding)
+
+        used: set[str] = {n for n in by_name if n not in colliding} | reserved
+        result = {n: items for n, items in by_name.items() if n not in colliding}
+
+        for name in sorted(colliding):
+            for item in sorted(by_name[name], key=lambda x: len(x[1])):
+                new_name = cls._make_prefixed_name(
+                    item[0], item[1], used, minimal=effective == AllExportsCollisionStrategy.MinimalPrefix
+                )
+                if new_name in reserved:
+                    msg = (
+                        f"Cannot resolve collision: '{new_name}' conflicts with __init__.py model. "
+                        "Please rename one of the models."
+                    )
+                    raise Error(msg)
+                result[new_name] = [item]
+                used.add(new_name)
+        return result
+
+    @classmethod
+    def _raise_collision_error(
+        cls,
+        by_name: dict[str, list[tuple[str, tuple[str, ...], str]]],
+        colliding: set[str],
+    ) -> None:
+        """Raise an error with collision details."""
+        details = []
+        for n in colliding:
+            if len(items := by_name[n]) > 1:
+                details.append(f"  '{n}' is defined in: {', '.join(f'.{s}' for _, _, s in items)}")
+            else:
+                details.append(f"  '{n}' conflicts with a model in __init__.py")
+        raise Error(
+            "Name collision detected with --all-exports-scope:\n"
+            + "\n".join(details)
+            + "\n\nUse --all-exports-collision-strategy to specify how to handle collisions."
+        )
+
+    @staticmethod
+    def _make_prefixed_name(name: str, path: tuple[str, ...], used: set[str], *, minimal: bool) -> str:
+        """Generate a prefixed name, using minimal or full prefix."""
+        if minimal:
+            for depth in range(1, len(path) + 1):
+                if (candidate := "".join(p.title().replace("_", "") for p in path[-depth:]) + name) not in used:
+                    return candidate
+        return "".join(p.title().replace("_", "") for p in path) + name
+
+    @classmethod
+    def _build_all_exports_code(
+        cls,
+        resolved: dict[str, list[tuple[str, tuple[str, ...], str]]],
+    ) -> Imports:
+        """Build import statements from resolved exports."""
+        export_imports = Imports()
+        for export_name, items in resolved.items():
+            for orig, _, short in items:
+                export_imports.append(
+                    Import(from_=f".{short}", import_=orig, alias=export_name if export_name != orig else None)
+                )
+        return export_imports
+
+    @classmethod
+    def _collect_used_names_from_models(cls, models: list[DataModel]) -> set[str]:
+        """Collect identifiers referenced by models before rendering."""
+        names: set[str] = set()
+
+        def add(name: str | None) -> None:
+            if not name:
+                return
+            # first segment is sufficient to match import target or alias
+            names.add(name.split(".")[0])
+
+        def walk_data_type(data_type: DataType) -> None:
+            add(data_type.alias or data_type.type)
+            if data_type.reference:
+                add(data_type.reference.short_name)
+            for child in data_type.data_types:
+                walk_data_type(child)
+            if data_type.dict_key:
+                walk_data_type(data_type.dict_key)
+
+        for model in models:
+            add(model.class_name)
+            add(model.duplicate_class_name)
+            for base in model.base_classes:
+                add(base.type_hint)
+            for import_ in model.imports:
+                add(import_.alias or import_.import_.split(".")[-1])
+            for field in model.fields:
+                if field.extras.get("is_classvar"):
+                    continue
+                add(field.name)
+                add(field.alias)
+                walk_data_type(field.data_type)
+        return names
+
+    def __generate_forwarder_content(  # noqa: PLR6301
+        self,
+        original_module: tuple[str, ...],
+        internal_module: tuple[str, ...],
+        class_mappings: list[tuple[str, str]],
+        *,
+        is_init: bool = False,
+    ) -> str:
+        """Generate forwarder module content that re-exports classes from _internal.
+
+        Args:
+            original_module: The original module tuple (e.g., ("issuing",) or ())
+            internal_module: The _internal module tuple (e.g., ("_internal",))
+            class_mappings: List of (original_name, new_name) tuples, sorted by original_name
+            is_init: True if this is a package __init__.py, False for regular .py files
+
+        Returns:
+            The forwarder module content as a string
+        """
+        original_str = ".".join(original_module)
+        internal_str = ".".join(internal_module)
+        from_dots, module_name = relative(original_str, internal_str, reference_is_module=True, current_is_init=is_init)
+        relative_import = f"{from_dots}{module_name}"
+
+        imports = Imports()
+        for original_name, new_name in class_mappings:
+            if original_name == new_name:
+                imports.append(Import(from_=relative_import, import_=new_name))
+            else:
+                imports.append(Import(from_=relative_import, import_=new_name, alias=original_name))
+
+        return f"{imports.dump()}\n\n{imports.dump_all()}\n"
+
+    def __compute_internal_module_path(  # noqa: PLR6301
+        self,
+        scc_modules: set[tuple[str, ...]],
+        existing_modules: set[tuple[str, ...]],
+        *,
+        base_name: str = "_internal",
+    ) -> tuple[str, ...]:
+        """Compute the internal module path for an SCC."""
+        directories = [get_module_directory(m) for m in sorted(scc_modules)]
+
+        if not directories or any(not d for d in directories):
+            prefix: tuple[str, ...] = ()
+        else:
+            path_strings = ["/".join(d) for d in directories]
+            common = os.path.commonpath(path_strings)
+            prefix = tuple(common.split("/")) if common else ()
+
+        base_module = (base_name,) if not prefix else (*prefix, base_name)
+
+        if base_module in existing_modules:
+            counter = 1
+            while True:
+                candidate = (*prefix, f"{base_name}_{counter}") if prefix else (f"{base_name}_{counter}",)
+                if candidate not in existing_modules:
+                    return candidate
+                counter += 1
+
+        return base_module
+
+    def __collect_scc_models(  # noqa: PLR6301
+        self,
+        scc: set[tuple[str, ...]],
+        result_modules: dict[tuple[str, ...], list[DataModel]],
+    ) -> tuple[list[DataModel], dict[int, tuple[str, ...]]]:
+        """Collect all models from SCC modules.
+
+        Returns:
+            - List of all models in the SCC
+            - Mapping from model id to its original module
+        """
+        all_models: list[DataModel] = []
+        model_to_module: dict[int, tuple[str, ...]] = {}
+        for scc_module in sorted(scc):
+            for model in result_modules[scc_module]:
+                all_models.append(model)
+                model_to_module[id(model)] = scc_module
+        return all_models, model_to_module
+
+    def __rename_and_relocate_scc_models(  # noqa: PLR6301
+        self,
+        all_scc_models: list[DataModel],
+        model_to_original_module: dict[int, tuple[str, ...]],
+        internal_module: tuple[str, ...],
+        internal_path: Path,
+    ) -> tuple[defaultdict[tuple[str, ...], list[tuple[str, str]]], dict[str, str]]:
+        """Rename duplicate classes and relocate models to internal module.
+
+        Returns:
+            Tuple of:
+            - Mapping from original module to list of (original_name, new_name) tuples.
+            - Mapping from old reference paths to new reference paths.
+        """
+        class_name_counts = Counter(model.class_name for model in all_scc_models)
+        class_name_seen: dict[str, int] = {}
+        internal_module_str = ".".join(internal_module)
+        module_class_mappings: defaultdict[tuple[str, ...], list[tuple[str, str]]] = defaultdict(list)
+        path_mapping: dict[str, str] = {}
+
+        for model in all_scc_models:
+            original_class_name = model.class_name
+            original_module = model_to_original_module[id(model)]
+            old_path = model.path  # Save old path before updating
+
+            if class_name_counts[original_class_name] > 1:
+                seen_count = class_name_seen.get(original_class_name, 0)
+                new_class_name = f"{original_class_name}_{seen_count}" if seen_count > 0 else original_class_name
+                class_name_seen[original_class_name] = seen_count + 1
+            else:
+                new_class_name = original_class_name
+
+            model.reference.name = new_class_name
+            new_path = f"{internal_module_str}.{new_class_name}"
+            model.set_reference_path(new_path)
+            model.file_path = internal_path
+
+            module_class_mappings[original_module].append((original_class_name, new_class_name))
+            path_mapping[old_path] = new_path
+
+        return module_class_mappings, path_mapping
+
+    def __build_module_dependency_graph(  # noqa: PLR6301
+        self,
+        module_models_list: list[tuple[tuple[str, ...], list[DataModel]]],
+    ) -> dict[tuple[str, ...], set[tuple[str, ...]]]:
+        """Build a directed graph of module dependencies."""
+        path_to_module: dict[str, tuple[str, ...]] = {}
+        for module, models in module_models_list:
+            for model in models:
+                path_to_module[model.path] = module
+
+        graph: dict[tuple[str, ...], set[tuple[str, ...]]] = {}
+
+        def add_cross_module_edge(ref_path: str, source_module: tuple[str, ...]) -> None:
+            """Add edge if ref_path points to a different module."""
+            if ref_path in path_to_module:
+                target_module = path_to_module[ref_path]
+                if target_module != source_module:
+                    graph[source_module].add(target_module)
+
+        for module, models in module_models_list:
+            graph[module] = set()
+
+            for model in models:
+                for data_type in model.all_data_types:
+                    if data_type.reference and data_type.reference.source:
+                        add_cross_module_edge(data_type.reference.path, module)
+
+                for base_class in model.base_classes:
+                    if base_class.reference and base_class.reference.source:
+                        add_cross_module_edge(base_class.reference.path, module)
+
+        return graph
+
+    def __resolve_circular_imports(  # noqa: PLR0914
+        self,
+        module_models_list: list[tuple[tuple[str, ...], list[DataModel]]],
+    ) -> tuple[
+        list[tuple[tuple[str, ...], list[DataModel]]],
+        set[tuple[str, ...]],
+        dict[tuple[str, ...], tuple[tuple[str, ...], list[tuple[str, str]]]],
+        dict[str, str],
+    ]:
+        """Resolve circular imports by merging all SCCs into _internal.py modules.
+
+        Uses Tarjan's algorithm to find strongly connected components (SCCs) in the
+        module dependency graph. All modules in each SCC are merged into a single
+        _internal.py module to break import cycles. Original modules become thin
+        forwarders that re-export their classes from _internal.
+
+        Returns:
+            - Updated module_models_list with models moved to _internal modules
+            - Set of _internal modules created
+            - Forwarder map: original_module -> (internal_module, [(original_name, new_name)])
+            - Path mapping: old_reference_path -> new_reference_path
+        """
+        graph = self.__build_module_dependency_graph(module_models_list)
+
+        circular_sccs = find_circular_sccs(graph)
+
+        forwarder_map: dict[tuple[str, ...], tuple[tuple[str, ...], list[tuple[str, str]]]] = {}
+        all_path_mappings: dict[str, str] = {}
+
+        if not circular_sccs:
+            return module_models_list, set(), forwarder_map, all_path_mappings
+
+        # All circular SCCs are problematic and should be merged into _internal.py
+        # to break the import cycles.
+        problematic_sccs = circular_sccs
+
+        existing_modules = {module for module, _ in module_models_list}
+        internal_modules_created: set[tuple[str, ...]] = set()
+
+        result_modules: dict[tuple[str, ...], list[DataModel]] = {
+            module: list(models) for module, models in module_models_list
+        }
+
+        for scc in problematic_sccs:
+            internal_module = self.__compute_internal_module_path(scc, existing_modules | internal_modules_created)
+            internal_modules_created.add(internal_module)
+            internal_path = Path("/".join(internal_module))
+
+            all_scc_models, model_to_original_module = self.__collect_scc_models(scc, result_modules)
+            module_class_mappings, path_mapping = self.__rename_and_relocate_scc_models(
+                all_scc_models, model_to_original_module, internal_module, internal_path
+            )
+            all_path_mappings.update(path_mapping)
+
+            for scc_module in scc:
+                if scc_module in result_modules:  # pragma: no branch
+                    result_modules[scc_module] = []
+                if scc_module in module_class_mappings:  # pragma: no branch
+                    sorted_mappings = sorted(module_class_mappings[scc_module], key=operator.itemgetter(0))
+                    forwarder_map[scc_module] = (internal_module, sorted_mappings)
+            result_modules[internal_module] = all_scc_models
+
+        new_module_models: list[tuple[tuple[str, ...], list[DataModel]]] = [
+            (internal_module, result_modules[internal_module])
+            for internal_module in sorted(internal_modules_created)
+            if internal_module in result_modules  # pragma: no branch
+        ]
+
+        for module, _ in module_models_list:
+            if module not in internal_modules_created:  # pragma: no branch
+                new_module_models.append((module, result_modules.get(module, [])))
+
+        return new_module_models, internal_modules_created, forwarder_map, all_path_mappings
+
+    def __get_resolve_reference_action_parts(
+        self,
+        models: list[DataModel],
+        require_update_action_models: list[str],
+        *,
+        use_deferred_annotations: bool,
+    ) -> list[str]:
+        """Return the trailing rebuild/update calls for the given module's models."""
+        if self.dump_resolve_reference_action is None:
+            return []
+
+        require_update_action_model_paths = set(require_update_action_models)
+        required_paths_in_module = {m.path for m in models if m.path in require_update_action_model_paths}
+
+        if (
+            use_deferred_annotations
+            and required_paths_in_module
+            and self.dump_resolve_reference_action is pydantic_model_v2.dump_resolve_reference_action
+        ):
+            module_positions = {m.reference.short_name: i for i, m in enumerate(models) if m.reference}
+            module_model_names = set(module_positions)
+
+            forward_needed: set[str] = set()
+            for model in models:
+                if model.path not in required_paths_in_module or not model.reference:
+                    continue
+                name = model.reference.short_name
+                pos = module_positions[name]
+                refs = {
+                    t.reference.short_name
+                    for f in model.fields
+                    for t in f.data_type.all_data_types
+                    if t.reference and t.reference.short_name in module_model_names
+                }
+                if name in refs or any(module_positions.get(r, -1) > pos for r in refs):
+                    forward_needed.add(model.path)
+
+            # Propagate requirement through inheritance.
+            changed = True
+            required_filtered = set(forward_needed)
+            while changed:
+                changed = False
+                for model in models:
+                    if not model.reference or model.path in required_filtered:
+                        continue
+                    base_paths = {b.reference.path for b in model.base_classes if b.reference}
+                    if base_paths & required_filtered:
+                        required_filtered.add(model.path)
+                        changed = True
+
+            required_paths_in_module = required_filtered
+
+        return [
+            "\n",
+            self.dump_resolve_reference_action(
+                m.reference.short_name for m in models if m.reference and m.path in required_paths_in_module
+            ),
+        ]
+
+    def parse(  # noqa: PLR0912, PLR0913, PLR0914, PLR0915, PLR0917
+        self,
+        with_import: bool | None = True,  # noqa: FBT001, FBT002
+        format_: bool | None = True,  # noqa: FBT001, FBT002
+        settings_path: Path | None = None,
+        disable_future_imports: bool = False,  # noqa: FBT001, FBT002
+        all_exports_scope: AllExportsScope | None = None,
+        all_exports_collision_strategy: AllExportsCollisionStrategy | None = None,
+        module_split_mode: ModuleSplitMode | None = None,
+    ) -> str | dict[tuple[str, ...], Result]:
+        """Parse schema and generate code, returning single file or module dict."""
+        self.parse_raw()
+
+        use_deferred_annotations = bool(
+            self.target_python_version.has_native_deferred_annotations or (with_import and not disable_future_imports)
+        )
+
+        if (
+            with_import
+            and not disable_future_imports
+            and not self.target_python_version.has_native_deferred_annotations
+        ):
+            self.imports.append(IMPORT_ANNOTATIONS)
+
+        if format_:
+            code_formatter: CodeFormatter | None = CodeFormatter(
+                self.target_python_version,
+                settings_path,
+                self.wrap_string_literal,
+                skip_string_normalization=not self.use_double_quotes,
+                known_third_party=self.known_third_party,
+                custom_formatters=self.custom_formatter,
+                custom_formatters_kwargs=self.custom_formatters_kwargs,
+                encoding=self.encoding,
+                formatters=self.formatters,
+            )
+        else:
+            code_formatter = None
+
+        _, sorted_data_models, require_update_action_models = sort_data_models(self.results)
+
+        results: dict[tuple[str, ...], Result] = {}
+
+        def module_key(data_model: DataModel) -> tuple[str, ...]:
+            if module_split_mode == ModuleSplitMode.Single:
+                file_name = camel_to_snake(data_model.class_name)
+                return (*data_model.module_path, file_name)
+            return tuple(data_model.module_path)
+
+        def sort_key(data_model: DataModel) -> tuple[int, tuple[str, ...]]:
+            key = module_key(data_model)
+            return (len(key), key)
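+        # e.g. with ModuleSplitMode.Single, a model FooBar under module path
+        # ("models",) is keyed ("models", "foo_bar") and gets its own file.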
+
+        # process in reverse order to correctly establish module levels
+        grouped_models = groupby(
+            sorted(sorted_data_models.values(), key=sort_key, reverse=True),
+            key=module_key,
+        )
+
+        module_models: list[tuple[tuple[str, ...], list[DataModel]]] = []
+        unused_models: list[DataModel] = []
+        model_to_module_models: dict[DataModel, tuple[tuple[str, ...], list[DataModel]]] = {}
+        module_to_import: dict[tuple[str, ...], Imports] = {}
+        model_path_to_module_name: dict[str, str] = {}
+
+        previous_module: tuple[str, ...] = ()
+        for module, models in ((k, [*v]) for k, v in grouped_models):
+            for model in models:
+                model_to_module_models[model] = module, models
+                if module_split_mode == ModuleSplitMode.Single:
+                    model_path_to_module_name[model.path] = ".".join(module)
+            self.__delete_duplicate_models(models)
+            self.__replace_duplicate_name_in_module(models)
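+            # Deepest-first iteration can jump up more than one package level;
+            # insert empty placeholder entries for the skipped parents.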
+            if len(previous_module) - len(module) > 1:
+                module_models.extend(
+                    (
+                        previous_module[:parts],
+                        [],
+                    )
+                    for parts in range(len(previous_module) - 1, len(module), -1)
+                )
+            module_models.append((
+                module,
+                models,
+            ))
+            previous_module = module
+
+        shared_module_entry = self.__reuse_model_tree_scope(module_models, require_update_action_models)
+        if shared_module_entry:
+            module_models.insert(0, shared_module_entry)
+
+        # Resolve circular imports by moving models to _internal.py modules
+        module_models, internal_modules, forwarder_map, path_mapping = self.__resolve_circular_imports(module_models)
+
+        # Update require_update_action_models with new paths for relocated models
+        if path_mapping:
+            require_update_action_models[:] = [path_mapping.get(path, path) for path in require_update_action_models]
+
+        class Processed(NamedTuple):
+            module: tuple[str, ...]
+            module_key: tuple[str, ...]  # Original module tuple (without file extension)
+            models: list[DataModel]
+            init: bool
+            imports: Imports
+            scoped_model_resolver: ModelResolver
+
+        processed_models: list[Processed] = []
+
+        for module_, models in module_models:
+            imports = module_to_import[module_] = Imports(self.use_exact_imports)
+            init = False
+            if module_:
+                if len(module_) == 1:
+                    parent = ("__init__.py",)
+                    if parent not in results:
+                        results[parent] = Result(body="")
+                else:
+                    for i in range(1, len(module_)):
+                        parent = (*module_[:i], "__init__.py")
+                        if parent not in results:
+                            results[parent] = Result(body="")
+                if (*module_, "__init__.py") in results:
+                    module = (*module_, "__init__.py")
+                    init = True
+                else:
+                    module = tuple(part.replace("-", "_") for part in (*module_[:-1], f"{module_[-1]}.py"))
+            else:
+                module = ("__init__.py",)
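+            # e.g. module_ == ("api", "v1-beta") maps to the output key
+            # ("api", "v1_beta.py"); the root module maps to ("__init__.py",).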
+
+            all_module_fields = {field.name for model in models for field in model.fields if field.name is not None}
+            scoped_model_resolver = ModelResolver(exclude_names=all_module_fields)
+
+            self.__alias_shadowed_imports(models, all_module_fields)
+            self.__override_required_field(models)
+            self.__replace_unique_list_to_set(models)
+            self.__change_from_import(
+                models,
+                imports,
+                scoped_model_resolver,
+                init=init,
+                internal_modules=internal_modules,
+                model_path_to_module_name=model_path_to_module_name,
+            )
+            self.__extract_inherited_enum(models)
+            self.__set_reference_default_value_to_field(models)
+            self.__reuse_model(models, require_update_action_models)
+            self.__collapse_root_models(models, unused_models, imports, scoped_model_resolver)
+            self.__set_default_enum_member(models)
+            self.__wrap_root_model_default_values(models)
+            self.__sort_models(
+                models,
+                imports,
+                use_deferred_annotations=use_deferred_annotations,
+            )
+            self.__change_field_name(models)
+            self.__apply_discriminator_type(models, imports)
+            self.__set_one_literal_on_default(models)
+            self.__fix_dataclass_field_ordering(models)
+
+            processed_models.append(Processed(module, module_, models, init, imports, scoped_model_resolver))
+
+        for processed_model in processed_models:
+            for model in processed_model.models:
+                processed_model.imports.append(model.imports)
+
+        for unused_model in unused_models:
+            module, models = model_to_module_models[unused_model]
+            if unused_model in models:  # pragma: no cover
+                imports = module_to_import[module]
+                imports.remove(unused_model.imports)
+                models.remove(unused_model)
+
+        for processed_model in processed_models:
+            # Post-process imports: drop any that are never used.
+            used_names = self._collect_used_names_from_models(processed_model.models)
+            unused_imports = [
+                (from_, import_)
+                for from_, imports_ in processed_model.imports.items()
+                for import_ in imports_
+                if not {processed_model.imports.alias.get(from_, {}).get(import_, import_), import_}.intersection(
+                    used_names
+                )
+            ]
+            for from_, import_ in unused_imports:
+                import_obj = Import(from_=from_, import_=import_)
+                while processed_model.imports.counter.get((from_, import_), 0) > 0:
+                    processed_model.imports.remove(import_obj)
+
+        for module, mod_key, models, init, imports, scoped_model_resolver in processed_models:  # noqa: B007
+            # process after removing unused models
+            self.__change_imported_model_name(models, imports, scoped_model_resolver)
+
+        future_imports = self.imports.extract_future()
+        future_imports_str = str(future_imports)
+
+        for module, mod_key, models, init, imports, scoped_model_resolver in processed_models:  # noqa: B007
+            result: list[str] = []
+            export_imports: Imports | None = None
+
+            if all_exports_scope is not None and module[-1] == "__init__.py":
+                child_exports = self._collect_exports_for_init(module, processed_models, all_exports_scope)
+                if child_exports:
+                    local_model_names = {
+                        m.reference.short_name
+                        for m in models
+                        if m.reference and not m.reference.short_name.startswith("_")
+                    }
+                    resolved_exports = self._resolve_export_collisions(
+                        child_exports, all_exports_collision_strategy, local_model_names
+                    )
+                    export_imports = self._build_all_exports_code(resolved_exports)
+
+            if models:
+                if with_import:
+                    import_parts = [s for s in [future_imports_str, str(self.imports), str(imports)] if s]
+                    result += [*import_parts, "\n"]
+
+                if export_imports:
+                    result += [str(export_imports), ""]
+                    for m in models:
+                        if m.reference and not m.reference.short_name.startswith("_"):  # pragma: no branch
+                            export_imports.add_export(m.reference.short_name)
+                    result += [export_imports.dump_all(multiline=True) + "\n"]
+
+                self.__update_type_aliases(models)
+                code = dump_templates(models)
+                result += [code]
+
+                result += self.__get_resolve_reference_action_parts(
+                    models,
+                    require_update_action_models,
+                    use_deferred_annotations=use_deferred_annotations,
+                )
+
+            # Generate forwarder content for modules that had models moved to _internal
+            if not result and mod_key in forwarder_map:
+                internal_module, class_mappings = forwarder_map[mod_key]
+                forwarder_content = self.__generate_forwarder_content(
+                    mod_key, internal_module, class_mappings, is_init=init
+                )
+                result = [forwarder_content]
+
+            if not result and not init:
+                continue
+            body = "\n".join(result)
+            if code_formatter:
+                body = code_formatter.format_code(body)
+
+            results[module] = Result(
+                body=body,
+                future_imports=future_imports_str,
+                source=models[0].file_path if models else None,
+            )
+
+        if all_exports_scope is not None:
+            processed_init_modules = {m for m, _, _, _, _, _ in processed_models if m[-1] == "__init__.py"}
+            for init_module, init_result in list(results.items()):
+                if init_module[-1] != "__init__.py" or init_module in processed_init_modules or init_result.body:
+                    continue
+                if child_exports := self._collect_exports_for_init(
+                    init_module, processed_models, all_exports_scope
+                ):  # pragma: no branch
+                    resolved = self._resolve_export_collisions(child_exports, all_exports_collision_strategy, set())
+                    export_imports = self._build_all_exports_code(resolved)
+                    import_parts = [s for s in [future_imports_str, str(self.imports)] if s] if with_import else []
+                    parts = import_parts + (["\n"] if import_parts else [])
+                    parts += [str(export_imports), "", export_imports.dump_all(multiline=True)]
+                    body = "\n".join(parts)
+                    results[init_module] = Result(
+                        body=code_formatter.format_code(body) if code_formatter else body,
+                        future_imports=future_imports_str,
+                    )
+
+        # retain existing behaviour: a single root module collapses to a plain string
+        if [*results] == [("__init__.py",)]:
+            single_result = results[("__init__.py",)]
+            return single_result.body
+
+        results = {tuple(i.replace("-", "_") for i in k): v for k, v in results.items()}
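+        # Without treat_dot_as_module, dots in a filename stem collapse to
+        # underscores below, e.g. ("a.b.py",) becomes ("a_b.py",).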
+        return (
+            self.__postprocess_result_modules(results)
+            if self.treat_dot_as_module
+            else {
+                tuple((part[: part.rfind(".")].replace(".", "_") + part[part.rfind(".") :]) for part in k): v
+                for k, v in results.items()
+            }
+        )
diff -pruN 0.26.4-3/src/datamodel_code_generator/parser/graphql.py 0.45.0-1/src/datamodel_code_generator/parser/graphql.py
--- 0.26.4-3/src/datamodel_code_generator/parser/graphql.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/parser/graphql.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,652 @@
+"""GraphQL schema parser implementation.
+
+Parses GraphQL schema files to generate Python data models including
+objects, interfaces, enums, scalars, inputs, and union types.
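+
+Illustrative sketch (details vary with generator options): the SDL
+
+    type Person {
+        name: String!
+        friends: [Person]
+    }
+
+parses into a `Person` model with a required `name` field, an optional
+`friends` list, and a `typename__` field aliased to "__typename".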
+"""
+
+from __future__ import annotations
+
+from pathlib import Path
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+)
+from urllib.parse import ParseResult
+
+from datamodel_code_generator import (
+    DEFAULT_SHARED_MODULE_NAME,
+    AllOfMergeMode,
+    DataclassArguments,
+    DefaultPutDict,
+    LiteralType,
+    PythonVersion,
+    PythonVersionMin,
+    ReadOnlyWriteOnlyModelType,
+    ReuseScope,
+    snooper_to_methods,
+)
+from datamodel_code_generator.format import DEFAULT_FORMATTERS, DatetimeClassType, Formatter
+from datamodel_code_generator.model import DataModel, DataModelFieldBase
+from datamodel_code_generator.model import pydantic as pydantic_model
+from datamodel_code_generator.model.dataclass import DataClass
+from datamodel_code_generator.model.enum import SPECIALIZED_ENUM_TYPE_MATCH, Enum
+from datamodel_code_generator.model.scalar import DataTypeScalarBackport
+from datamodel_code_generator.model.union import DataTypeUnionBackport
+from datamodel_code_generator.parser.base import (
+    DataType,
+    Parser,
+    Source,
+    escape_characters,
+)
+from datamodel_code_generator.reference import ModelType, Reference
+from datamodel_code_generator.types import DataTypeManager, StrictTypes, Types
+
+try:
+    import graphql
+except ImportError as exc:  # pragma: no cover
+    msg = "Please run `$pip install 'datamodel-code-generator[graphql]`' to generate data-model from a GraphQL schema."
+    raise Exception(msg) from exc  # noqa: TRY002
+
+
+if TYPE_CHECKING:
+    from collections import defaultdict
+    from collections.abc import Iterable, Iterator, Mapping, Sequence
+
+# graphql-core >=3.2.7 removed TypeResolvers in favor of TypeFields.kind.
+# Normalize to a single callable for resolving type kinds.
+try:  # graphql-core < 3.2.7
+    graphql_resolver_kind = graphql.type.introspection.TypeResolvers().kind  # pyright: ignore[reportAttributeAccessIssue]
+except AttributeError:  # pragma: no cover - executed on newer graphql-core
+    graphql_resolver_kind = graphql.type.introspection.TypeFields.kind  # pyright: ignore[reportAttributeAccessIssue]
+
+
+def build_graphql_schema(schema_str: str) -> graphql.GraphQLSchema:
+    """Build a graphql schema from a string."""
+    schema = graphql.build_schema(schema_str)
+    return graphql.lexicographic_sort_schema(schema)
+
+
+@snooper_to_methods()
+class GraphQLParser(Parser):
+    """Parser for GraphQL schema files."""
+
+    # raw graphql schema as a `graphql-core` object
+    raw_obj: graphql.GraphQLSchema
+    # all processed graphql objects
+    # mapper from an object name (unique) to an object
+    all_graphql_objects: dict[str, graphql.GraphQLNamedType]
+    # a reference for each object
+    # mapper from an object name to its reference
+    references: dict[str, Reference] = {}  # noqa: RUF012
+    # mapper from graphql type to all objects with this type
+    # `graphql.type.introspection.TypeKind` -- an enum with all supported types
+    # `graphql.GraphQLNamedType` -- base type for each graphql object
+    # see `graphql-core` for more details
+    support_graphql_types: dict[graphql.type.introspection.TypeKind, list[graphql.GraphQLNamedType]]
+    # graphql types order for render
+    # may become a configurable parameter in the future
+    parse_order: list[graphql.type.introspection.TypeKind] = [  # noqa: RUF012
+        graphql.type.introspection.TypeKind.SCALAR,
+        graphql.type.introspection.TypeKind.ENUM,
+        graphql.type.introspection.TypeKind.INTERFACE,
+        graphql.type.introspection.TypeKind.OBJECT,
+        graphql.type.introspection.TypeKind.INPUT_OBJECT,
+        graphql.type.introspection.TypeKind.UNION,
+    ]
+
+    def __init__(  # noqa: PLR0913
+        self,
+        source: str | Path | ParseResult,
+        *,
+        data_model_type: type[DataModel] = pydantic_model.BaseModel,
+        data_model_root_type: type[DataModel] = pydantic_model.CustomRootType,
+        data_model_scalar_type: type[DataModel] = DataTypeScalarBackport,
+        data_model_union_type: type[DataModel] = DataTypeUnionBackport,
+        data_type_manager_type: type[DataTypeManager] = pydantic_model.DataTypeManager,
+        data_model_field_type: type[DataModelFieldBase] = pydantic_model.DataModelField,
+        base_class: str | None = None,
+        additional_imports: list[str] | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
+        target_python_version: PythonVersion = PythonVersionMin,
+        dump_resolve_reference_action: Callable[[Iterable[str]], str] | None = None,
+        validation: bool = False,
+        field_constraints: bool = False,
+        snake_case_field: bool = False,
+        strip_default_none: bool = False,
+        aliases: Mapping[str, str] | None = None,
+        allow_population_by_field_name: bool = False,
+        apply_default_values_for_required_fields: bool = False,
+        allow_extra_fields: bool = False,
+        extra_fields: str | None = None,
+        force_optional_for_required_fields: bool = False,
+        class_name: str | None = None,
+        use_standard_collections: bool = False,
+        base_path: Path | None = None,
+        use_schema_description: bool = False,
+        use_field_description: bool = False,
+        use_attribute_docstrings: bool = False,
+        use_inline_field_description: bool = False,
+        use_default_kwarg: bool = False,
+        reuse_model: bool = False,
+        reuse_scope: ReuseScope | None = None,
+        shared_module_name: str = DEFAULT_SHARED_MODULE_NAME,
+        encoding: str = "utf-8",
+        enum_field_as_literal: LiteralType | None = None,
+        set_default_enum_member: bool = False,
+        use_subclass_enum: bool = False,
+        use_specialized_enum: bool = True,
+        strict_nullable: bool = False,
+        use_generic_container_types: bool = False,
+        enable_faux_immutability: bool = False,
+        remote_text_cache: DefaultPutDict[str, str] | None = None,
+        disable_appending_item_suffix: bool = False,
+        strict_types: Sequence[StrictTypes] | None = None,
+        empty_enum_field_name: str | None = None,
+        custom_class_name_generator: Callable[[str], str] | None = None,
+        field_extra_keys: set[str] | None = None,
+        field_include_all_keys: bool = False,
+        field_extra_keys_without_x_prefix: set[str] | None = None,
+        wrap_string_literal: bool | None = None,
+        use_title_as_name: bool = False,
+        use_operation_id_as_name: bool = False,
+        use_unique_items_as_set: bool = False,
+        allof_merge_mode: AllOfMergeMode = AllOfMergeMode.Constraints,
+        http_headers: Sequence[tuple[str, str]] | None = None,
+        http_ignore_tls: bool = False,
+        use_annotated: bool = False,
+        use_non_positive_negative_number_constrained_types: bool = False,
+        use_decimal_for_multiple_of: bool = False,
+        original_field_name_delimiter: str | None = None,
+        use_double_quotes: bool = False,
+        use_union_operator: bool = False,
+        allow_responses_without_content: bool = False,
+        collapse_root_models: bool = False,
+        skip_root_model: bool = False,
+        use_type_alias: bool = False,
+        special_field_name_prefix: str | None = None,
+        remove_special_field_name_prefix: bool = False,
+        capitalise_enum_members: bool = False,
+        keep_model_order: bool = False,
+        use_one_literal_as_default: bool = False,
+        use_enum_values_in_discriminator: bool = False,
+        known_third_party: list[str] | None = None,
+        custom_formatters: list[str] | None = None,
+        custom_formatters_kwargs: dict[str, Any] | None = None,
+        use_pendulum: bool = False,
+        http_query_parameters: Sequence[tuple[str, str]] | None = None,
+        treat_dot_as_module: bool = False,
+        use_exact_imports: bool = False,
+        default_field_extras: dict[str, Any] | None = None,
+        target_datetime_class: DatetimeClassType = DatetimeClassType.Datetime,
+        keyword_only: bool = False,
+        frozen_dataclasses: bool = False,
+        no_alias: bool = False,
+        formatters: list[Formatter] = DEFAULT_FORMATTERS,
+        parent_scoped_naming: bool = False,
+        dataclass_arguments: DataclassArguments | None = None,
+        type_mappings: list[str] | None = None,
+        read_only_write_only_model_type: ReadOnlyWriteOnlyModelType | None = None,
+        use_serialize_as_any: bool = False,
+        use_frozen_field: bool = False,
+    ) -> None:
+        """Initialize the GraphQL parser with configuration options."""
+        super().__init__(
+            source=source,
+            data_model_type=data_model_type,
+            data_model_root_type=data_model_root_type,
+            data_type_manager_type=data_type_manager_type,
+            data_model_field_type=data_model_field_type,
+            base_class=base_class,
+            additional_imports=additional_imports,
+            custom_template_dir=custom_template_dir,
+            extra_template_data=extra_template_data,
+            target_python_version=target_python_version,
+            dump_resolve_reference_action=dump_resolve_reference_action,
+            validation=validation,
+            field_constraints=field_constraints,
+            snake_case_field=snake_case_field,
+            strip_default_none=strip_default_none,
+            aliases=aliases,
+            allow_population_by_field_name=allow_population_by_field_name,
+            allow_extra_fields=allow_extra_fields,
+            extra_fields=extra_fields,
+            apply_default_values_for_required_fields=apply_default_values_for_required_fields,
+            force_optional_for_required_fields=force_optional_for_required_fields,
+            class_name=class_name,
+            use_standard_collections=use_standard_collections,
+            base_path=base_path,
+            use_schema_description=use_schema_description,
+            use_field_description=use_field_description,
+            use_attribute_docstrings=use_attribute_docstrings,
+            use_inline_field_description=use_inline_field_description,
+            use_default_kwarg=use_default_kwarg,
+            reuse_model=reuse_model,
+            reuse_scope=reuse_scope,
+            shared_module_name=shared_module_name,
+            encoding=encoding,
+            enum_field_as_literal=enum_field_as_literal,
+            use_one_literal_as_default=use_one_literal_as_default,
+            use_enum_values_in_discriminator=use_enum_values_in_discriminator,
+            set_default_enum_member=set_default_enum_member,
+            use_subclass_enum=use_subclass_enum,
+            use_specialized_enum=use_specialized_enum,
+            strict_nullable=strict_nullable,
+            use_generic_container_types=use_generic_container_types,
+            enable_faux_immutability=enable_faux_immutability,
+            remote_text_cache=remote_text_cache,
+            disable_appending_item_suffix=disable_appending_item_suffix,
+            strict_types=strict_types,
+            empty_enum_field_name=empty_enum_field_name,
+            custom_class_name_generator=custom_class_name_generator,
+            field_extra_keys=field_extra_keys,
+            field_include_all_keys=field_include_all_keys,
+            field_extra_keys_without_x_prefix=field_extra_keys_without_x_prefix,
+            wrap_string_literal=wrap_string_literal,
+            use_title_as_name=use_title_as_name,
+            use_operation_id_as_name=use_operation_id_as_name,
+            use_unique_items_as_set=use_unique_items_as_set,
+            allof_merge_mode=allof_merge_mode,
+            http_headers=http_headers,
+            http_ignore_tls=http_ignore_tls,
+            use_annotated=use_annotated,
+            use_non_positive_negative_number_constrained_types=use_non_positive_negative_number_constrained_types,
+            use_decimal_for_multiple_of=use_decimal_for_multiple_of,
+            original_field_name_delimiter=original_field_name_delimiter,
+            use_double_quotes=use_double_quotes,
+            use_union_operator=use_union_operator,
+            allow_responses_without_content=allow_responses_without_content,
+            collapse_root_models=collapse_root_models,
+            skip_root_model=skip_root_model,
+            use_type_alias=use_type_alias,
+            special_field_name_prefix=special_field_name_prefix,
+            remove_special_field_name_prefix=remove_special_field_name_prefix,
+            capitalise_enum_members=capitalise_enum_members,
+            keep_model_order=keep_model_order,
+            known_third_party=known_third_party,
+            custom_formatters=custom_formatters,
+            custom_formatters_kwargs=custom_formatters_kwargs,
+            use_pendulum=use_pendulum,
+            http_query_parameters=http_query_parameters,
+            treat_dot_as_module=treat_dot_as_module,
+            use_exact_imports=use_exact_imports,
+            default_field_extras=default_field_extras,
+            target_datetime_class=target_datetime_class,
+            keyword_only=keyword_only,
+            frozen_dataclasses=frozen_dataclasses,
+            no_alias=no_alias,
+            formatters=formatters,
+            parent_scoped_naming=parent_scoped_naming,
+            dataclass_arguments=dataclass_arguments,
+            type_mappings=type_mappings,
+            read_only_write_only_model_type=read_only_write_only_model_type,
+            use_serialize_as_any=use_serialize_as_any,
+            use_frozen_field=use_frozen_field,
+        )
+
+        self.data_model_scalar_type = data_model_scalar_type
+        self.data_model_union_type = data_model_union_type
+        self.use_standard_collections = use_standard_collections
+        self.use_union_operator = use_union_operator
+
+    def _get_context_source_path_parts(self) -> Iterator[tuple[Source, list[str]]]:
+        # TODO (denisart): This method temporarily duplicates
+        # `datamodel_code_generator.parser.jsonschema.JsonSchemaParser._get_context_source_path_parts`.
+
+        if isinstance(self.source, list) or (  # pragma: no cover
+            isinstance(self.source, Path) and self.source.is_dir()
+        ):  # pragma: no cover
+            self.current_source_path = Path()
+            self.model_resolver.after_load_files = {
+                self.base_path.joinpath(s.path).resolve().as_posix() for s in self.iter_source
+            }
+
+        for source in self.iter_source:
+            if isinstance(self.source, ParseResult):  # pragma: no cover
+                path_parts = self.get_url_path_parts(self.source)
+            else:
+                path_parts = list(source.path.parts)
+            if self.current_source_path is not None:  # pragma: no cover
+                self.current_source_path = source.path
+            with (
+                self.model_resolver.current_base_path_context(source.path.parent),
+                self.model_resolver.current_root_context(path_parts),
+            ):
+                yield source, path_parts
+
+    def _resolve_types(self, paths: list[str], schema: graphql.GraphQLSchema) -> None:
+        for type_name, type_ in schema.type_map.items():
+            if type_name.startswith("__"):
+                continue
+
+            if type_name in {"Query", "Mutation"}:
+                continue
+
+            resolved_type = graphql_resolver_kind(type_, None)
+
+            if resolved_type in self.support_graphql_types:  # pragma: no cover
+                self.all_graphql_objects[type_.name] = type_
+                # TODO: need a special method for each GraphQL type
+                self.references[type_.name] = Reference(
+                    path=f"{paths!s}/{resolved_type.value}/{type_.name}",
+                    name=type_.name,
+                    original_name=type_.name,
+                )
+
+                self.support_graphql_types[resolved_type].append(type_)
+
+    def _create_data_model(self, model_type: type[DataModel] | None = None, **kwargs: Any) -> DataModel:
+        """Create data model instance with dataclass_arguments support for DataClass."""
+        data_model_class = model_type or self.data_model_type
+        if issubclass(data_model_class, DataClass):
+            # Use dataclass_arguments from kwargs, or fall back to self.dataclass_arguments
+            # If both are None, construct from legacy frozen_dataclasses/keyword_only flags
+            dataclass_arguments = kwargs.pop("dataclass_arguments", None)
+            if dataclass_arguments is None:
+                dataclass_arguments = self.dataclass_arguments
+            if dataclass_arguments is None:
+                # Construct from legacy flags for library API compatibility
+                dataclass_arguments = {}
+                if self.frozen_dataclasses:
+                    dataclass_arguments["frozen"] = True
+                if self.keyword_only:
+                    dataclass_arguments["kw_only"] = True
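+                # e.g. frozen_dataclasses=True alone yields {"frozen": True}.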
+            kwargs["dataclass_arguments"] = dataclass_arguments
+            kwargs.pop("frozen", None)
+            kwargs.pop("keyword_only", None)
+        else:
+            kwargs.pop("dataclass_arguments", None)
+        return data_model_class(**kwargs)
+
+    def _typename_field(self, name: str) -> DataModelFieldBase:
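+        # For name "Droid" this typically renders roughly as
+        #     typename__: Literal["Droid"] = Field("Droid", alias="__typename")
+        # (exact output depends on the model type and options).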
+        return self.data_model_field_type(
+            name="typename__",
+            data_type=DataType(
+                literals=[name],
+                use_union_operator=self.use_union_operator,
+                use_standard_collections=self.use_standard_collections,
+            ),
+            default=name,
+            use_annotated=self.use_annotated,
+            required=False,
+            alias="__typename",
+            use_one_literal_as_default=True,
+            use_default_kwarg=self.use_default_kwarg,
+            has_default=True,
+        )
+
+    def _get_default(  # noqa: PLR6301
+        self,
+        field: graphql.GraphQLField | graphql.GraphQLInputField,
+        final_data_type: DataType,
+        *,
+        required: bool,
+    ) -> Any:
+        if isinstance(field, graphql.GraphQLInputField):  # pragma: no cover
+            if field.default_value == graphql.pyutils.Undefined:  # pragma: no cover
+                return None
+            return field.default_value
+        # Plain output fields (graphql.GraphQLField) carry no default value.
+        return None
+
+    def parse_scalar(self, scalar_graphql_object: graphql.GraphQLScalarType) -> None:
+        """Parse a GraphQL scalar type and add it to results."""
+        self.results.append(
+            self.data_model_scalar_type(
+                reference=self.references[scalar_graphql_object.name],
+                fields=[],
+                custom_template_dir=self.custom_template_dir,
+                extra_template_data=self.extra_template_data,
+                description=scalar_graphql_object.description,
+            )
+        )
+
+    def should_parse_enum_as_literal(self, obj: graphql.GraphQLEnumType) -> bool:
+        """Determine if an enum should be parsed as a literal type."""
+        return self.enum_field_as_literal == LiteralType.All or (
+            self.enum_field_as_literal == LiteralType.One and len(obj.values) == 1
+        )
+
+    def parse_enum(self, enum_object: graphql.GraphQLEnumType) -> None:
+        """Parse a GraphQL enum type and add it to results."""
+        if self.should_parse_enum_as_literal(enum_object):
+            return self.parse_enum_as_literal(enum_object)
+        return self.parse_enum_as_enum_class(enum_object)
+
+    def parse_enum_as_literal(self, enum_object: graphql.GraphQLEnumType) -> None:
+        """Parse enum values as a Literal type."""
+        data_type = self.data_type(literals=list(enum_object.values.keys()))
+        data_model_type = self._create_data_model(
+            model_type=self.data_model_root_type,
+            reference=self.references[enum_object.name],
+            fields=[
+                self.data_model_field_type(
+                    required=True,
+                    data_type=data_type,
+                )
+            ],
+            custom_base_class=self.base_class,
+            custom_template_dir=self.custom_template_dir,
+            extra_template_data=self.extra_template_data,
+            path=self.current_source_path,
+            description=enum_object.description,
+        )
+        self.results.append(data_model_type)
+
+    def parse_enum_as_enum_class(self, enum_object: graphql.GraphQLEnumType) -> None:
+        """Parse enum values as an Enum class."""
+        enum_fields: list[DataModelFieldBase] = []
+        exclude_field_names: set[str] = set()
+
+        for value_name, value in enum_object.values.items():
+            default = f"'{value_name.translate(escape_characters)}'" if isinstance(value_name, str) else value_name
+
+            field_name = self.model_resolver.get_valid_field_name(
+                value_name, excludes=exclude_field_names, model_type=ModelType.ENUM
+            )
+            exclude_field_names.add(field_name)
+
+            enum_fields.append(
+                self.data_model_field_type(
+                    name=field_name,
+                    data_type=self.data_type_manager.get_data_type(
+                        Types.string,
+                    ),
+                    default=default,
+                    required=True,
+                    strip_default_none=self.strip_default_none,
+                    has_default=True,
+                    use_field_description=value.description is not None,
+                    original_name=None,
+                )
+            )
+
+        enum_cls: type[Enum] = Enum
+        if (
+            self.target_python_version.has_strenum
+            and self.use_specialized_enum
+            and (specialized_type := SPECIALIZED_ENUM_TYPE_MATCH.get(Types.string))
+        ):
+            # If specialized enum is available in the target Python version, use it
+            enum_cls = specialized_type
+
+        enum: Enum = enum_cls(
+            reference=self.references[enum_object.name],
+            fields=enum_fields,
+            path=self.current_source_path,
+            description=enum_object.description,
+            type_=Types.string if self.use_subclass_enum else None,
+            custom_template_dir=self.custom_template_dir,
+        )
+        self.results.append(enum)
+
+    def parse_field(
+        self,
+        field_name: str,
+        alias: str | None,
+        field: graphql.GraphQLField | graphql.GraphQLInputField,
+    ) -> DataModelFieldBase:
+        """Parse a GraphQL field and return a data model field."""
+        final_data_type = DataType(
+            is_optional=True,
+            use_union_operator=self.use_union_operator,
+            use_standard_collections=self.use_standard_collections,
+        )
+        data_type = final_data_type
+        obj = field.type
+
+        while graphql.is_list_type(obj) or graphql.is_non_null_type(obj):
+            if graphql.is_list_type(obj):
+                data_type.is_list = True
+
+                new_data_type = DataType(
+                    is_optional=True,
+                    use_union_operator=self.use_union_operator,
+                    use_standard_collections=self.use_standard_collections,
+                )
+                data_type.data_types = [new_data_type]
+
+                data_type = new_data_type
+            elif graphql.is_non_null_type(obj):  # pragma: no cover
+                data_type.is_optional = False
+
+            obj = graphql.assert_wrapping_type(obj)
+            obj = obj.of_type
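+        # e.g. `[String!]` unwraps to an optional outer list whose element
+        # type is non-optional (roughly Optional[list[str]]).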
+
+        obj = graphql.assert_named_type(obj)
+        if obj.name in self.references:
+            data_type.reference = self.references[obj.name]
+        else:  # pragma: no cover
+            # Only happens for Query and Mutation root types
+            data_type.type = obj.name
+
+        required = (not self.force_optional_for_required_fields) and (not final_data_type.is_optional)
+
+        default = self._get_default(field, final_data_type, required=required)
+        extras = {} if self.default_field_extras is None else self.default_field_extras.copy()
+
+        if field.description is not None:  # pragma: no cover
+            extras["description"] = field.description
+
+        return self.data_model_field_type(
+            name=field_name,
+            default=default,
+            data_type=final_data_type,
+            required=required,
+            extras=extras,
+            alias=alias,
+            strip_default_none=self.strip_default_none,
+            use_annotated=self.use_annotated,
+            use_serialize_as_any=self.use_serialize_as_any,
+            use_field_description=self.use_field_description,
+            use_inline_field_description=self.use_inline_field_description,
+            use_default_kwarg=self.use_default_kwarg,
+            original_name=field_name,
+            has_default=default is not None,
+        )
+
+    def parse_object_like(
+        self,
+        obj: graphql.GraphQLInterfaceType | graphql.GraphQLObjectType | graphql.GraphQLInputObjectType,
+    ) -> None:
+        """Parse a GraphQL object-like type and add it to results."""
+        fields = []
+        exclude_field_names: set[str] = set()
+
+        for field_name, field in obj.fields.items():
+            field_name_, alias = self.model_resolver.get_valid_field_name_and_alias(
+                field_name,
+                excludes=exclude_field_names,
+                model_type=self.field_name_model_type,
+                class_name=obj.name,
+            )
+            exclude_field_names.add(field_name_)
+
+            data_model_field_type = self.parse_field(field_name_, alias, field)
+            fields.append(data_model_field_type)
+
+        fields.append(self._typename_field(obj.name))
+
+        base_classes = []
+        if hasattr(obj, "interfaces"):  # pragma: no cover
+            base_classes = [self.references[i.name] for i in obj.interfaces]  # pyright: ignore[reportAttributeAccessIssue]
+
+        data_model_type = self._create_data_model(
+            reference=self.references[obj.name],
+            fields=fields,
+            base_classes=base_classes,
+            custom_base_class=self.base_class,
+            custom_template_dir=self.custom_template_dir,
+            extra_template_data=self.extra_template_data,
+            path=self.current_source_path,
+            description=obj.description,
+            keyword_only=self.keyword_only,
+            treat_dot_as_module=self.treat_dot_as_module,
+            dataclass_arguments=self.dataclass_arguments,
+        )
+        self.results.append(data_model_type)
+
+    def parse_interface(self, interface_graphql_object: graphql.GraphQLInterfaceType) -> None:
+        """Parse a GraphQL interface type and add it to results."""
+        self.parse_object_like(interface_graphql_object)
+
+    def parse_object(self, graphql_object: graphql.GraphQLObjectType) -> None:
+        """Parse a GraphQL object type and add it to results."""
+        self.parse_object_like(graphql_object)
+
+    def parse_input_object(self, input_graphql_object: graphql.GraphQLInputObjectType) -> None:
+        """Parse a GraphQL input object type and add it to results."""
+        self.parse_object_like(input_graphql_object)  # pragma: no cover
+
+    def parse_union(self, union_object: graphql.GraphQLUnionType) -> None:
+        """Parse a GraphQL union type and add it to results."""
+        fields = [self.data_model_field_type(name=type_.name, data_type=DataType()) for type_ in union_object.types]
+
+        data_model_type = self.data_model_union_type(
+            reference=self.references[union_object.name],
+            fields=fields,
+            custom_base_class=self.base_class,
+            custom_template_dir=self.custom_template_dir,
+            extra_template_data=self.extra_template_data,
+            path=self.current_source_path,
+            description=union_object.description,
+        )
+        self.results.append(data_model_type)
+
+    def parse_raw(self) -> None:
+        """Parse the raw GraphQL schema and generate all data models."""
+        self.all_graphql_objects = {}
+        self.references: dict[str, Reference] = {}
+
+        self.support_graphql_types = {
+            graphql.type.introspection.TypeKind.SCALAR: [],
+            graphql.type.introspection.TypeKind.ENUM: [],
+            graphql.type.introspection.TypeKind.UNION: [],
+            graphql.type.introspection.TypeKind.INTERFACE: [],
+            graphql.type.introspection.TypeKind.OBJECT: [],
+            graphql.type.introspection.TypeKind.INPUT_OBJECT: [],
+        }
+
+        # may become a configurable parameter in the future
+        mapper_from_graphql_type_to_parser_method = {
+            graphql.type.introspection.TypeKind.SCALAR: self.parse_scalar,
+            graphql.type.introspection.TypeKind.ENUM: self.parse_enum,
+            graphql.type.introspection.TypeKind.INTERFACE: self.parse_interface,
+            graphql.type.introspection.TypeKind.OBJECT: self.parse_object,
+            graphql.type.introspection.TypeKind.INPUT_OBJECT: self.parse_input_object,
+            graphql.type.introspection.TypeKind.UNION: self.parse_union,
+        }
+
+        for source, path_parts in self._get_context_source_path_parts():
+            schema: graphql.GraphQLSchema = build_graphql_schema(source.text)
+            self.raw_obj = schema
+
+            self._resolve_types(path_parts, schema)
+
+            for next_type in self.parse_order:
+                for obj in self.support_graphql_types[next_type]:
+                    parser_ = mapper_from_graphql_type_to_parser_method[next_type]
+                    parser_(obj)
diff -pruN 0.26.4-3/src/datamodel_code_generator/parser/jsonschema.py 0.45.0-1/src/datamodel_code_generator/parser/jsonschema.py
--- 0.26.4-3/src/datamodel_code_generator/parser/jsonschema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/parser/jsonschema.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3148 @@
+"""JSON Schema parser implementation.
+
+Handles parsing of JSON Schema, JSON, YAML, Dict, and CSV inputs to generate
+Python data models. Supports draft-04 through draft-2020-12 schemas.
+"""
+
+from __future__ import annotations
+
+import enum as _enum
+import json
+from collections import defaultdict
+from collections.abc import Iterable
+from contextlib import contextmanager, suppress
+from functools import cached_property, lru_cache
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, Callable, ClassVar, Literal, Optional, Union
+from urllib.parse import ParseResult, unquote
+from warnings import warn
+
+from pydantic import (
+    Field,
+)
+
+from datamodel_code_generator import (
+    DEFAULT_SHARED_MODULE_NAME,
+    AllOfMergeMode,
+    DataclassArguments,
+    InvalidClassNameError,
+    ReadOnlyWriteOnlyModelType,
+    ReuseScope,
+    YamlValue,
+    load_yaml,
+    load_yaml_dict,
+    load_yaml_dict_from_path,
+    snooper_to_methods,
+)
+from datamodel_code_generator.format import (
+    DEFAULT_FORMATTERS,
+    DatetimeClassType,
+    Formatter,
+    PythonVersion,
+    PythonVersionMin,
+)
+from datamodel_code_generator.imports import IMPORT_ANY
+from datamodel_code_generator.model import DataModel, DataModelFieldBase
+from datamodel_code_generator.model import pydantic as pydantic_model
+from datamodel_code_generator.model.base import UNDEFINED, get_module_name, sanitize_module_name
+from datamodel_code_generator.model.dataclass import DataClass
+from datamodel_code_generator.model.enum import (
+    SPECIALIZED_ENUM_TYPE_MATCH,
+    Enum,
+    StrEnum,
+)
+from datamodel_code_generator.parser import DefaultPutDict, LiteralType
+from datamodel_code_generator.parser.base import (
+    SPECIAL_PATH_FORMAT,
+    Parser,
+    Source,
+    escape_characters,
+    get_special_path,
+    title_to_class_name,
+)
+from datamodel_code_generator.reference import SPECIAL_PATH_MARKER, ModelType, Reference, is_url
+from datamodel_code_generator.types import (
+    ANY,
+    DataType,
+    DataTypeManager,
+    EmptyDataType,
+    StrictTypes,
+    Types,
+    UnionIntFloat,
+)
+from datamodel_code_generator.util import (
+    PYDANTIC_V2,
+    BaseModel,
+    field_validator,
+    model_validator,
+)
+
+if PYDANTIC_V2:
+    from pydantic import ConfigDict
+
+if TYPE_CHECKING:
+    from collections.abc import Generator, Iterable, Iterator, Mapping, Sequence
+
+
+def unescape_json_pointer_segment(segment: str) -> str:
+    """Unescape JSON pointer segment by converting escape sequences and percent-encoding."""
+    # Unescape ~1, ~0, and percent-encoding
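+    # e.g. "a~1b~0c%20d" -> "a/b~c d"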
+    return unquote(segment.replace("~1", "/").replace("~0", "~"))
+
+
+def get_model_by_path(
+    schema: dict[str, YamlValue] | list[YamlValue], keys: list[str] | list[int]
+) -> dict[str, YamlValue]:
+    """Retrieve a model from schema by traversing the given path keys."""
+    if not keys:
+        if isinstance(schema, dict):
+            return schema
+        msg = f"Does not support json pointer to array. schema={schema}, key={keys}"  # pragma: no cover
+        raise NotImplementedError(msg)  # pragma: no cover
+    # Unescape the key if it's a string (JSON pointer segment)
+    key = keys[0]
+    if isinstance(key, str):  # pragma: no branch
+        key = unescape_json_pointer_segment(key)
+    value = schema.get(str(key), {}) if isinstance(schema, dict) else schema[int(key)]
+    if len(keys) == 1:
+        if isinstance(value, dict):
+            return value
+        msg = f"Does not support json pointer to array. schema={schema}, key={keys}"  # pragma: no cover
+        raise NotImplementedError(msg)  # pragma: no cover
+    if isinstance(value, (dict, list)):
+        return get_model_by_path(value, keys[1:])
+    msg = f"Cannot traverse non-container value. schema={schema}, key={keys}"  # pragma: no cover
+    raise NotImplementedError(msg)  # pragma: no cover
+
+
+# TODO: This dictionary contains formats valid only for OpenAPI and not for
+#       jsonschema and vice versa. They should be separated.
+json_schema_data_formats: dict[str, dict[str, Types]] = {
+    "integer": {
+        "int32": Types.int32,
+        "int64": Types.int64,
+        "default": Types.integer,
+        "date-time": Types.date_time,
+        "unix-time": Types.int64,
+    },
+    "number": {
+        "float": Types.float,
+        "double": Types.double,
+        "decimal": Types.decimal,
+        "date-time": Types.date_time,
+        "time": Types.time,
+        "default": Types.number,
+    },
+    "string": {
+        "default": Types.string,
+        "byte": Types.byte,  # base64 encoded string
+        "binary": Types.binary,
+        "date": Types.date,
+        "date-time": Types.date_time,
+        "duration": Types.timedelta,
+        "time": Types.time,
+        "password": Types.password,
+        "path": Types.path,
+        "email": Types.email,
+        "idn-email": Types.email,
+        "uuid": Types.uuid,
+        "uuid1": Types.uuid1,
+        "uuid2": Types.uuid2,
+        "uuid3": Types.uuid3,
+        "uuid4": Types.uuid4,
+        "uuid5": Types.uuid5,
+        "uri": Types.uri,
+        "uri-reference": Types.string,
+        "hostname": Types.hostname,
+        "ipv4": Types.ipv4,
+        "ipv4-network": Types.ipv4_network,
+        "ipv6": Types.ipv6,
+        "ipv6-network": Types.ipv6_network,
+        "decimal": Types.decimal,
+        "integer": Types.integer,
+    },
+    "boolean": {"default": Types.boolean},
+    "object": {"default": Types.object},
+    "null": {"default": Types.null},
+    "array": {"default": Types.array},
+}
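+# The "default" entry of each inner mapping serves as the fallback for formats
+# that are not explicitly listed.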
+
+
+class JSONReference(_enum.Enum):
+    """Define types of JSON references."""
+
+    LOCAL = "LOCAL"
+    REMOTE = "REMOTE"
+    URL = "URL"
+
+
+class Discriminator(BaseModel):
+    """Represent OpenAPI discriminator object."""
+
+    propertyName: str  # noqa: N815
+    mapping: Optional[dict[str, str]] = None  # noqa: UP045
+
+
+class JsonSchemaObject(BaseModel):
+    """Represent a JSON Schema object with validation and parsing capabilities."""
+
+    if not TYPE_CHECKING:
+        if PYDANTIC_V2:
+
+            @classmethod
+            def get_fields(cls) -> dict[str, Any]:
+                """Get fields for Pydantic v2 models."""
+                return cls.model_fields
+
+        else:
+
+            @classmethod
+            def get_fields(cls) -> dict[str, Any]:
+                """Get fields for Pydantic v1 models."""
+                return cls.__fields__
+
+            @classmethod
+            def model_rebuild(cls) -> None:
+                """Rebuild model by updating forward references."""
+                cls.update_forward_refs()
+
+    __constraint_fields__: set[str] = {  # noqa: RUF012
+        "exclusiveMinimum",
+        "minimum",
+        "exclusiveMaximum",
+        "maximum",
+        "multipleOf",
+        "minItems",
+        "maxItems",
+        "minLength",
+        "maxLength",
+        "pattern",
+        "uniqueItems",
+    }
+    __extra_key__: str = SPECIAL_PATH_FORMAT.format("extras")
+    __metadata_only_fields__: set[str] = {  # noqa: RUF012
+        "title",
+        "description",
+        "id",
+        "$id",
+        "$schema",
+        "$comment",
+        "examples",
+        "example",
+        "x_enum_varnames",
+        "definitions",
+        "$defs",
+        "default",
+        "readOnly",
+        "writeOnly",
+        "deprecated",
+    }
+
+    @model_validator(mode="before")
+    def validate_exclusive_maximum_and_exclusive_minimum(cls, values: Any) -> Any:  # noqa: N805
+        """Validate and convert boolean exclusive maximum and minimum to numeric values."""
+        if not isinstance(values, dict):
+            return values
+        exclusive_maximum: float | bool | None = values.get("exclusiveMaximum")
+        exclusive_minimum: float | bool | None = values.get("exclusiveMinimum")
+
+        if exclusive_maximum is True:
+            values["exclusiveMaximum"] = values["maximum"]
+            del values["maximum"]
+        elif exclusive_maximum is False:
+            del values["exclusiveMaximum"]
+        if exclusive_minimum is True:
+            values["exclusiveMinimum"] = values["minimum"]
+            del values["minimum"]
+        elif exclusive_minimum is False:
+            del values["exclusiveMinimum"]
+        return values
+
+    @field_validator("ref")
+    def validate_ref(cls, value: Any) -> Any:  # noqa: N805
+        """Validate and normalize $ref values."""
+        if isinstance(value, str) and "#" in value:
+            if value.endswith("#/"):
+                return value[:-1]
+            if "#/" in value or value[0] == "#" or value[-1] == "#":
+                return value
+            return value.replace("#", "#/")
+        return value
+
+    @field_validator("required", mode="before")
+    def validate_required(cls, value: Any) -> Any:  # noqa: N805
+        """Validate and normalize required field values."""
+        if value is None:
+            return []
+        if isinstance(value, list):  # noqa: PLR1702
+            # Filter to only include valid strings, excluding invalid objects
+            required_fields: list[str] = []
+            for item in value:
+                if isinstance(item, str):
+                    required_fields.append(item)
+
+                # In some cases, the required field can include "anyOf", "oneOf", or "allOf" as a dict (#2297)
+                elif isinstance(item, dict):
+                    for key, val in item.items():
+                        if isinstance(val, list):
+                            # If 'anyOf' or "oneOf" is present, we won't include it in required fields
+                            if key in {"anyOf", "oneOf"}:
+                                continue
+
+                            if key == "allOf":
+                                # If 'allOf' is present, we include them as required fields
+                                required_fields.extend(sub_item for sub_item in val if isinstance(sub_item, str))
+
+            value = required_fields
+
+        return value
+
+    @field_validator("type", mode="before")
+    def validate_null_type(cls, value: Any) -> Any:  # noqa: N805
+        """Validate and convert unquoted null type to string "null"."""
+        # TODO[openapi]: This should be supported only for OpenAPI 3.1+
+        # See: https://github.com/koxudaxi/datamodel-code-generator/issues/2477#issuecomment-3192480591
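+        # e.g. {"type": ["string", None]} becomes {"type": ["string", "null"]}.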
+        if value is None:
+            value = "null"
+        if isinstance(value, list) and None in value:
+            value = [v if v is not None else "null" for v in value]
+        return value
+
+    items: Optional[Union[list[JsonSchemaObject], JsonSchemaObject, bool]] = None  # noqa: UP007, UP045
+    uniqueItems: Optional[bool] = None  # noqa: N815, UP045
+    type: Optional[Union[str, list[str]]] = None  # noqa: UP007, UP045
+    format: Optional[str] = None  # noqa: UP045
+    pattern: Optional[str] = None  # noqa: UP045
+    minLength: Optional[int] = None  # noqa:  N815,UP045
+    maxLength: Optional[int] = None  # noqa:  N815,UP045
+    minimum: Optional[UnionIntFloat] = None  # noqa:  UP045
+    maximum: Optional[UnionIntFloat] = None  # noqa:  UP045
+    minItems: Optional[int] = None  # noqa:  N815,UP045
+    maxItems: Optional[int] = None  # noqa:  N815,UP045
+    multipleOf: Optional[float] = None  # noqa: N815, UP045
+    exclusiveMaximum: Optional[Union[float, bool]] = None  # noqa: N815, UP007, UP045
+    exclusiveMinimum: Optional[Union[float, bool]] = None  # noqa: N815, UP007, UP045
+    additionalProperties: Optional[Union[JsonSchemaObject, bool]] = None  # noqa: N815, UP007, UP045
+    patternProperties: Optional[dict[str, JsonSchemaObject]] = None  # noqa: N815, UP045
+    oneOf: list[JsonSchemaObject] = []  # noqa: N815, RUF012
+    anyOf: list[JsonSchemaObject] = []  # noqa: N815, RUF012
+    allOf: list[JsonSchemaObject] = []  # noqa: N815, RUF012
+    enum: list[Any] = []  # noqa: RUF012
+    writeOnly: Optional[bool] = None  # noqa: N815, UP045
+    readOnly: Optional[bool] = None  # noqa: N815, UP045
+    properties: Optional[dict[str, Union[JsonSchemaObject, bool]]] = None  # noqa: UP007, UP045
+    required: list[str] = []  # noqa: RUF012
+    ref: Optional[str] = Field(default=None, alias="$ref")  # noqa: UP045
+    nullable: Optional[bool] = False  # noqa: UP045
+    x_enum_varnames: list[str] = Field(default_factory=list, alias="x-enum-varnames")
+    x_enum_names: list[str] = Field(default_factory=list, alias="x-enumNames")
+    description: Optional[str] = None  # noqa: UP045
+    title: Optional[str] = None  # noqa: UP045
+    example: Any = None
+    examples: Any = None
+    default: Any = None
+    id: Optional[str] = Field(default=None, alias="$id")  # noqa: UP045
+    custom_type_path: Optional[str] = Field(default=None, alias="customTypePath")  # noqa: UP045
+    custom_base_path: Optional[str] = Field(default=None, alias="customBasePath")  # noqa: UP045
+    extras: dict[str, Any] = Field(alias=__extra_key__, default_factory=dict)
+    discriminator: Optional[Union[Discriminator, str]] = None  # noqa: UP007, UP045
+    if PYDANTIC_V2:
+        model_config = ConfigDict(  # pyright: ignore[reportPossiblyUnboundVariable]
+            arbitrary_types_allowed=True,
+            ignored_types=(cached_property,),
+        )
+    else:
+
+        class Config:
+            """Pydantic v1 configuration for JsonSchemaObject."""
+
+            arbitrary_types_allowed = True
+            keep_untouched = (cached_property,)
+            smart_casts = True
+
+    def __init__(self, **data: Any) -> None:
+        """Initialize JsonSchemaObject with extra fields handling."""
+        super().__init__(**data)
+        # Restore extras from alias key (for dict -> parse_obj round-trip)
+        alias_extras = data.get(self.__extra_key__, {})
+        # Collect custom keys from raw data
+        raw_extras = {k: v for k, v in data.items() if k not in EXCLUDE_FIELD_KEYS}
+        # Merge: raw_extras takes precedence (original data is the source of truth)
+        self.extras = {**alias_extras, **raw_extras}
+        if "const" in alias_extras:  # pragma: no cover
+            self.extras["const"] = alias_extras["const"]
+
+    @cached_property
+    def is_object(self) -> bool:
+        """Check if the schema represents an object type."""
+        return self.properties is not None or (
+            self.type == "object" and not self.allOf and not self.oneOf and not self.anyOf and not self.ref
+        )
+
+    @cached_property
+    def is_array(self) -> bool:
+        """Check if the schema represents an array type."""
+        return self.items is not None or self.type == "array"
+
+    @cached_property
+    def ref_object_name(self) -> str:  # pragma: no cover
+        """Extract the object name from the reference path."""
+        return (self.ref or "").rsplit("/", 1)[-1]
+
+    @field_validator("items", mode="before")
+    def validate_items(cls, values: Any) -> Any:  # noqa: N805
+        """Validate items field, converting empty dicts to None."""
+        # An empty dict is falsy, so it is normalized to None here.
+        return values or None
+
+    @cached_property
+    def has_default(self) -> bool:
+        """Check if the schema has a default value or default factory."""
+        return "default" in self.__fields_set__ or "default_factory" in self.extras
+
+    @cached_property
+    def has_constraint(self) -> bool:
+        """Check if the schema has any constraint fields set."""
+        return bool(self.__constraint_fields__ & self.__fields_set__)
+
+    @cached_property
+    def ref_type(self) -> JSONReference | None:
+        """Get the reference type (LOCAL, REMOTE, or URL)."""
+        if self.ref:
+            return get_ref_type(self.ref)
+        return None  # pragma: no cover
+
+    @cached_property
+    def type_has_null(self) -> bool:
+        """Check if the type list or oneOf/anyOf contains null."""
+        if isinstance(self.type, list) and "null" in self.type:
+            return True
+        for item in self.oneOf + self.anyOf:
+            if item.type == "null":
+                return True
+            if isinstance(item.type, list) and "null" in item.type:
+                return True
+        return False
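+    # For illustration (derived from the checks above): both
+    #   {"type": ["string", "null"]}
+    #   {"anyOf": [{"type": "string"}, {"type": "null"}]}
+    # yield type_has_null == True once parsed into a JsonSchemaObject.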
+
+    @cached_property
+    def has_multiple_types(self) -> bool:
+        """Check if the type is a list with multiple non-null types."""
+        if not isinstance(self.type, list):
+            return False
+        non_null_types = [t for t in self.type if t != "null"]
+        return len(non_null_types) > 1
+
+    @cached_property
+    def has_ref_with_schema_keywords(self) -> bool:
+        """Check if schema has $ref combined with schema-affecting keywords.
+
+        Metadata-only keywords (title, description, etc.) are excluded
+        as they don't affect the schema structure.
+        """
+        if not self.ref:
+            return False
+        other_fields = self.__fields_set__ - {"ref"}
+        schema_affecting_fields = other_fields - self.__metadata_only_fields__ - {"extras"}
+        if self.extras:
+            schema_affecting_extras = {k for k in self.extras if k not in self.__metadata_only_fields__}
+            if schema_affecting_extras:
+                schema_affecting_fields |= {"extras"}
+        return bool(schema_affecting_fields)
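+    # Sketch, assuming __metadata_only_fields__ (defined elsewhere in this
+    # module) covers keywords such as "title" and "description":
+    #   {"$ref": "#/$defs/Name", "description": "..."} -> False (metadata only)
+    #   {"$ref": "#/$defs/Name", "minLength": 1}       -> True  (affects schema)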
+
+
+@lru_cache
+def get_ref_type(ref: str) -> JSONReference:
+    """Determine the type of reference (LOCAL, REMOTE, or URL)."""
+    if ref.startswith("#"):
+        return JSONReference.LOCAL
+    if is_url(ref):
+        return JSONReference.URL
+    return JSONReference.REMOTE
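+
+# A minimal sketch of the classification above, assuming is_url() recognizes
+# absolute URLs:
+#   get_ref_type("#/definitions/Pet")            -> JSONReference.LOCAL
+#   get_ref_type("https://example.com/pet.json") -> JSONReference.URL
+#   get_ref_type("schemas/pet.json#/Pet")        -> JSONReference.REMOTE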
+
+
+def _get_type(type_: str, format__: str | None = None) -> Types:
+    """Get the appropriate Types enum for a given JSON Schema type and format."""
+    if type_ not in json_schema_data_formats:
+        return Types.any
+    if (data_formats := json_schema_data_formats[type_].get("default" if format__ is None else format__)) is not None:
+        return data_formats
+
+    warn(f"format of {format__!r} not understood for {type_!r} - using default", stacklevel=2)
+    return json_schema_data_formats[type_]["default"]
+
+
+JsonSchemaObject.model_rebuild()
+
+DEFAULT_FIELD_KEYS: set[str] = {
+    "example",
+    "examples",
+    "description",
+    "discriminator",
+    "title",
+    "const",
+    "default_factory",
+}
+
+EXCLUDE_FIELD_KEYS_IN_JSON_SCHEMA: set[str] = {
+    "readOnly",
+    "writeOnly",
+}
+
+EXCLUDE_FIELD_KEYS = (
+    set(JsonSchemaObject.get_fields())  # pyright: ignore[reportAttributeAccessIssue]
+    - DEFAULT_FIELD_KEYS
+    - EXCLUDE_FIELD_KEYS_IN_JSON_SCHEMA
+) | {
+    "$id",
+    "$ref",
+    JsonSchemaObject.__extra_key__,
+}
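+
+# Sketch of how the sets above route raw keys into JsonSchemaObject.extras:
+# given {"type": "string", "description": "d", "x-order": 1, "readOnly": True},
+# "type" is a declared field and is excluded, while "description" (in
+# DEFAULT_FIELD_KEYS), "x-order" (unknown key), and "readOnly" (in
+# EXCLUDE_FIELD_KEYS_IN_JSON_SCHEMA) are all kept in extras.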
+
+
+@snooper_to_methods()  # noqa: PLR0904
+class JsonSchemaParser(Parser):
+    """Parser for JSON Schema, JSON, YAML, Dict, and CSV formats."""
+
+    SCHEMA_PATHS: ClassVar[list[str]] = ["#/definitions", "#/$defs"]
+    SCHEMA_OBJECT_TYPE: ClassVar[type[JsonSchemaObject]] = JsonSchemaObject
+
+    def __init__(  # noqa: PLR0913
+        self,
+        source: str | Path | list[Path] | ParseResult,
+        *,
+        data_model_type: type[DataModel] = pydantic_model.BaseModel,
+        data_model_root_type: type[DataModel] = pydantic_model.CustomRootType,
+        data_type_manager_type: type[DataTypeManager] = pydantic_model.DataTypeManager,
+        data_model_field_type: type[DataModelFieldBase] = pydantic_model.DataModelField,
+        base_class: str | None = None,
+        additional_imports: list[str] | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
+        target_python_version: PythonVersion = PythonVersionMin,
+        dump_resolve_reference_action: Callable[[Iterable[str]], str] | None = None,
+        validation: bool = False,
+        field_constraints: bool = False,
+        snake_case_field: bool = False,
+        strip_default_none: bool = False,
+        aliases: Mapping[str, str] | None = None,
+        allow_population_by_field_name: bool = False,
+        apply_default_values_for_required_fields: bool = False,
+        allow_extra_fields: bool = False,
+        extra_fields: str | None = None,
+        force_optional_for_required_fields: bool = False,
+        class_name: str | None = None,
+        use_standard_collections: bool = False,
+        base_path: Path | None = None,
+        use_schema_description: bool = False,
+        use_field_description: bool = False,
+        use_attribute_docstrings: bool = False,
+        use_inline_field_description: bool = False,
+        use_default_kwarg: bool = False,
+        reuse_model: bool = False,
+        reuse_scope: ReuseScope | None = None,
+        shared_module_name: str = DEFAULT_SHARED_MODULE_NAME,
+        encoding: str = "utf-8",
+        enum_field_as_literal: LiteralType | None = None,
+        use_one_literal_as_default: bool = False,
+        use_enum_values_in_discriminator: bool = False,
+        set_default_enum_member: bool = False,
+        use_subclass_enum: bool = False,
+        use_specialized_enum: bool = True,
+        strict_nullable: bool = False,
+        use_generic_container_types: bool = False,
+        enable_faux_immutability: bool = False,
+        remote_text_cache: DefaultPutDict[str, str] | None = None,
+        disable_appending_item_suffix: bool = False,
+        strict_types: Sequence[StrictTypes] | None = None,
+        empty_enum_field_name: str | None = None,
+        custom_class_name_generator: Callable[[str], str] | None = None,
+        field_extra_keys: set[str] | None = None,
+        field_include_all_keys: bool = False,
+        field_extra_keys_without_x_prefix: set[str] | None = None,
+        wrap_string_literal: bool | None = None,
+        use_title_as_name: bool = False,
+        use_operation_id_as_name: bool = False,
+        use_unique_items_as_set: bool = False,
+        allof_merge_mode: AllOfMergeMode = AllOfMergeMode.Constraints,
+        http_headers: Sequence[tuple[str, str]] | None = None,
+        http_ignore_tls: bool = False,
+        use_annotated: bool = False,
+        use_serialize_as_any: bool = False,
+        use_non_positive_negative_number_constrained_types: bool = False,
+        use_decimal_for_multiple_of: bool = False,
+        original_field_name_delimiter: str | None = None,
+        use_double_quotes: bool = False,
+        use_union_operator: bool = False,
+        allow_responses_without_content: bool = False,
+        collapse_root_models: bool = False,
+        skip_root_model: bool = False,
+        use_type_alias: bool = False,
+        special_field_name_prefix: str | None = None,
+        remove_special_field_name_prefix: bool = False,
+        capitalise_enum_members: bool = False,
+        keep_model_order: bool = False,
+        known_third_party: list[str] | None = None,
+        custom_formatters: list[str] | None = None,
+        custom_formatters_kwargs: dict[str, Any] | None = None,
+        use_pendulum: bool = False,
+        http_query_parameters: Sequence[tuple[str, str]] | None = None,
+        treat_dot_as_module: bool = False,
+        use_exact_imports: bool = False,
+        default_field_extras: dict[str, Any] | None = None,
+        target_datetime_class: DatetimeClassType | None = None,
+        keyword_only: bool = False,
+        frozen_dataclasses: bool = False,
+        no_alias: bool = False,
+        use_frozen_field: bool = False,
+        formatters: list[Formatter] = DEFAULT_FORMATTERS,
+        parent_scoped_naming: bool = False,
+        dataclass_arguments: DataclassArguments | None = None,
+        type_mappings: list[str] | None = None,
+        read_only_write_only_model_type: ReadOnlyWriteOnlyModelType | None = None,
+    ) -> None:
+        """Initialize the JSON Schema parser with configuration options."""
+        target_datetime_class = target_datetime_class or DatetimeClassType.Awaredatetime
+        super().__init__(
+            source=source,
+            data_model_type=data_model_type,
+            data_model_root_type=data_model_root_type,
+            data_type_manager_type=data_type_manager_type,
+            data_model_field_type=data_model_field_type,
+            base_class=base_class,
+            additional_imports=additional_imports,
+            custom_template_dir=custom_template_dir,
+            extra_template_data=extra_template_data,
+            target_python_version=target_python_version,
+            dump_resolve_reference_action=dump_resolve_reference_action,
+            validation=validation,
+            field_constraints=field_constraints,
+            snake_case_field=snake_case_field,
+            strip_default_none=strip_default_none,
+            aliases=aliases,
+            allow_population_by_field_name=allow_population_by_field_name,
+            allow_extra_fields=allow_extra_fields,
+            extra_fields=extra_fields,
+            apply_default_values_for_required_fields=apply_default_values_for_required_fields,
+            force_optional_for_required_fields=force_optional_for_required_fields,
+            class_name=class_name,
+            use_standard_collections=use_standard_collections,
+            base_path=base_path,
+            use_schema_description=use_schema_description,
+            use_field_description=use_field_description,
+            use_attribute_docstrings=use_attribute_docstrings,
+            use_inline_field_description=use_inline_field_description,
+            use_default_kwarg=use_default_kwarg,
+            reuse_model=reuse_model,
+            reuse_scope=reuse_scope,
+            shared_module_name=shared_module_name,
+            encoding=encoding,
+            enum_field_as_literal=enum_field_as_literal,
+            use_one_literal_as_default=use_one_literal_as_default,
+            use_enum_values_in_discriminator=use_enum_values_in_discriminator,
+            set_default_enum_member=set_default_enum_member,
+            use_subclass_enum=use_subclass_enum,
+            use_specialized_enum=use_specialized_enum,
+            strict_nullable=strict_nullable,
+            use_generic_container_types=use_generic_container_types,
+            enable_faux_immutability=enable_faux_immutability,
+            remote_text_cache=remote_text_cache,
+            disable_appending_item_suffix=disable_appending_item_suffix,
+            strict_types=strict_types,
+            empty_enum_field_name=empty_enum_field_name,
+            custom_class_name_generator=custom_class_name_generator,
+            field_extra_keys=field_extra_keys,
+            field_include_all_keys=field_include_all_keys,
+            field_extra_keys_without_x_prefix=field_extra_keys_without_x_prefix,
+            wrap_string_literal=wrap_string_literal,
+            use_title_as_name=use_title_as_name,
+            use_operation_id_as_name=use_operation_id_as_name,
+            use_unique_items_as_set=use_unique_items_as_set,
+            allof_merge_mode=allof_merge_mode,
+            http_headers=http_headers,
+            http_ignore_tls=http_ignore_tls,
+            use_annotated=use_annotated,
+            use_serialize_as_any=use_serialize_as_any,
+            use_non_positive_negative_number_constrained_types=use_non_positive_negative_number_constrained_types,
+            use_decimal_for_multiple_of=use_decimal_for_multiple_of,
+            original_field_name_delimiter=original_field_name_delimiter,
+            use_double_quotes=use_double_quotes,
+            use_union_operator=use_union_operator,
+            allow_responses_without_content=allow_responses_without_content,
+            collapse_root_models=collapse_root_models,
+            skip_root_model=skip_root_model,
+            use_type_alias=use_type_alias,
+            special_field_name_prefix=special_field_name_prefix,
+            remove_special_field_name_prefix=remove_special_field_name_prefix,
+            capitalise_enum_members=capitalise_enum_members,
+            keep_model_order=keep_model_order,
+            known_third_party=known_third_party,
+            custom_formatters=custom_formatters,
+            custom_formatters_kwargs=custom_formatters_kwargs,
+            use_pendulum=use_pendulum,
+            http_query_parameters=http_query_parameters,
+            treat_dot_as_module=treat_dot_as_module,
+            use_exact_imports=use_exact_imports,
+            default_field_extras=default_field_extras,
+            target_datetime_class=target_datetime_class,
+            keyword_only=keyword_only,
+            frozen_dataclasses=frozen_dataclasses,
+            no_alias=no_alias,
+            use_frozen_field=use_frozen_field,
+            formatters=formatters,
+            parent_scoped_naming=parent_scoped_naming,
+            dataclass_arguments=dataclass_arguments,
+            type_mappings=type_mappings,
+            read_only_write_only_model_type=read_only_write_only_model_type,
+        )
+
+        self.remote_object_cache: DefaultPutDict[str, dict[str, YamlValue]] = DefaultPutDict()
+        self.raw_obj: dict[str, YamlValue] = {}
+        self._root_id: Optional[str] = None  # noqa: UP045
+        self._root_id_base_path: Optional[str] = None  # noqa: UP045
+        self.reserved_refs: defaultdict[tuple[str, ...], set[str]] = defaultdict(set)
+        self.field_keys: set[str] = {
+            *DEFAULT_FIELD_KEYS,
+            *self.field_extra_keys,
+            *self.field_extra_keys_without_x_prefix,
+        }
+
+        if self.data_model_field_type.can_have_extra_keys:
+            self.get_field_extra_key: Callable[[str], str] = (
+                lambda key: self.model_resolver.get_valid_field_name_and_alias(
+                    key, model_type=self.field_name_model_type
+                )[0]
+            )
+
+        else:
+            self.get_field_extra_key = lambda key: key
+
+    def get_field_extras(self, obj: JsonSchemaObject) -> dict[str, Any]:
+        """Extract extra field metadata from a JSON Schema object."""
+        if self.field_include_all_keys:
+            extras = {
+                self.get_field_extra_key(k.removeprefix("x-") if k in self.field_extra_keys_without_x_prefix else k): v
+                for k, v in obj.extras.items()
+            }
+        else:
+            extras = {
+                self.get_field_extra_key(k.removeprefix("x-") if k in self.field_extra_keys_without_x_prefix else k): v
+                for k, v in obj.extras.items()
+                if k in self.field_keys
+            }
+        if self.default_field_extras:
+            extras.update(self.default_field_extras)
+        return extras
+
+    def _get_type_with_mappings(self, type_: str, format_: str | None = None) -> Types:
+        """Get the Types enum for a given type and format, applying custom type mappings.
+
+        Custom mappings from --type-mappings are checked first, falling back to
+        the default json_schema_data_formats mappings.
+        """
+        if self.type_mappings and format_ is not None and (type_, format_) in self.type_mappings:
+            target_format = self.type_mappings[type_, format_]
+            for type_formats in json_schema_data_formats.values():
+                if target_format in type_formats:
+                    return type_formats[target_format]
+            if target_format in json_schema_data_formats:
+                return json_schema_data_formats[target_format]["default"]
+
+        return _get_type(type_, format_)
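+    # Sketch, assuming self.type_mappings is a dict keyed by (type, format)
+    # tuples, as the lookup above implies: with {("string", "my-id"): "uuid"},
+    # a {"type": "string", "format": "my-id"} schema would resolve through a
+    # hypothetical "uuid" entry in json_schema_data_formats rather than the
+    # default string mapping.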
+
+    @cached_property
+    def schema_paths(self) -> list[tuple[str, list[str]]]:
+        """Get schema paths for definitions and defs."""
+        return [(s, s.removeprefix("#/").split("/")) for s in self.SCHEMA_PATHS]
+
+    @property
+    def root_id(self) -> str | None:
+        """Get the root $id from the model resolver."""
+        return self.model_resolver.root_id
+
+    @root_id.setter
+    def root_id(self, value: str | None) -> None:
+        """Set the root $id in the model resolver."""
+        self.model_resolver.set_root_id(value)
+
+    def should_parse_enum_as_literal(self, obj: JsonSchemaObject) -> bool:
+        """Determine if an enum should be parsed as a literal type."""
+        return self.enum_field_as_literal == LiteralType.All or (
+            self.enum_field_as_literal == LiteralType.One and len(obj.enum) == 1
+        )
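+    # For example: with --enum-field-as-literal=one, {"enum": ["a"]} becomes
+    # Literal["a"], while {"enum": ["a", "b"]} still becomes an Enum class;
+    # --enum-field-as-literal=all turns both into Literal types.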
+
+    @classmethod
+    def _extract_const_enum_from_combined(  # noqa: PLR0912
+        cls, items: list[JsonSchemaObject], parent_type: str | list[str] | None
+    ) -> tuple[list[Any], list[str], str | None, bool] | None:
+        """Extract enum values from oneOf/anyOf const pattern."""
+        enum_values: list[Any] = []
+        varnames: list[str] = []
+        nullable = False
+        inferred_type: str | None = None
+
+        for item in items:
+            if item.type == "null" and "const" not in item.extras:
+                nullable = True
+                continue
+
+            if "const" not in item.extras:
+                return None
+
+            if item.ref or item.properties or item.oneOf or item.anyOf or item.allOf:
+                return None
+
+            const_value = item.extras["const"]
+            enum_values.append(const_value)
+
+            if item.title:
+                varnames.append(item.title)
+            else:
+                varnames.append(str(const_value))
+
+            if inferred_type is None and const_value is not None:
+                if isinstance(const_value, str):
+                    inferred_type = "string"
+                elif isinstance(const_value, bool):
+                    inferred_type = "boolean"
+                elif isinstance(const_value, int):
+                    inferred_type = "integer"
+                elif isinstance(const_value, float):
+                    inferred_type = "number"
+
+        if not enum_values:  # pragma: no cover
+            return None
+
+        final_type: str | None
+        if isinstance(parent_type, str):
+            final_type = parent_type
+        elif isinstance(parent_type, list):
+            non_null_types = [t for t in parent_type if t != "null"]
+            final_type = non_null_types[0] if non_null_types else inferred_type
+            if "null" in parent_type:
+                nullable = True
+        else:
+            final_type = inferred_type
+
+        return (enum_values, varnames, final_type, nullable)
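+    # A sketch of the extraction above: for
+    #   oneOf: [{"const": "a", "title": "A"}, {"const": "b"}, {"type": "null"}]
+    # it returns (["a", "b"], ["A", "b"], "string", True) -- enum values,
+    # varnames, the type inferred from the first non-null const, and
+    # nullability from the bare "null" item.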
+
+    def _create_synthetic_enum_obj(
+        self,
+        original: JsonSchemaObject,
+        enum_values: list[Any],
+        varnames: list[str],
+        enum_type: str | None,
+        nullable: bool,  # noqa: FBT001
+    ) -> JsonSchemaObject:
+        """Create a synthetic JsonSchemaObject for enum parsing."""
+        final_enum = [*enum_values, None] if nullable else enum_values
+        final_varnames = varnames if len(varnames) == len(enum_values) else []
+
+        return self.SCHEMA_OBJECT_TYPE(
+            type=enum_type,
+            enum=final_enum,
+            title=original.title,
+            description=original.description,
+            x_enum_varnames=final_varnames,
+            default=original.default if original.has_default else None,
+        )
+
+    def is_constraints_field(self, obj: JsonSchemaObject) -> bool:
+        """Check if a field should include constraints."""
+        return obj.is_array or (
+            self.field_constraints and not (obj.ref or obj.anyOf or obj.oneOf or obj.allOf or obj.is_object or obj.enum)
+        )
+
+    def _resolve_field_flag(self, obj: JsonSchemaObject, flag: Literal["readOnly", "writeOnly"]) -> bool:
+        """Resolve a field flag (readOnly/writeOnly) from direct value, $ref, and compositions."""
+        if getattr(obj, flag) is True:
+            return True
+        if (
+            self.read_only_write_only_model_type
+            and obj.ref
+            and self._resolve_field_flag(self._load_ref_schema_object(obj.ref), flag)
+        ):
+            return True
+        return any(self._resolve_field_flag(sub, flag) for sub in obj.allOf + obj.anyOf + obj.oneOf)
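+    # Sketch: a field defined as {"allOf": [{"readOnly": true}]} resolves the
+    # "readOnly" flag to True here; $ref targets are only followed when
+    # read_only_write_only_model_type is enabled.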
+
+    def _collect_all_fields_for_request_response(
+        self,
+        fields: list[DataModelFieldBase],
+        base_classes: list[Reference] | None,
+    ) -> list[DataModelFieldBase]:
+        """Collect all fields including those from base classes for Request/Response models.
+
+        Order: parent → child, with child fields overriding parent fields of the same name.
+        """
+        all_fields: list[DataModelFieldBase] = []
+        visited: set[str] = set()
+
+        def iter_from_schema(obj: JsonSchemaObject, path: list[str]) -> Iterable[DataModelFieldBase]:
+            module_name = get_module_name(path[-1] if path else "", None, treat_dot_as_module=self.treat_dot_as_module)
+            if obj.properties:
+                yield from self.parse_object_fields(obj, path, module_name)
+            for item in obj.allOf:
+                if item.ref:
+                    if item.ref in visited:  # pragma: no cover
+                        continue
+                    visited.add(item.ref)
+                    yield from iter_from_schema(self._load_ref_schema_object(item.ref), path)
+                elif item.properties:
+                    yield from self.parse_object_fields(item, path, module_name)
+
+        for base_ref in base_classes or []:
+            if isinstance(base_ref.source, DataModel):
+                all_fields.extend(base_ref.source.iter_all_fields(visited))
+            elif base_ref.path not in visited:  # pragma: no cover
+                visited.add(base_ref.path)
+                all_fields.extend(iter_from_schema(self._load_ref_schema_object(base_ref.path), []))
+        all_fields.extend(fields)
+
+        deduplicated: dict[str, DataModelFieldBase] = {}
+        for field in all_fields:
+            key = field.original_name or field.name
+            if key:  # pragma: no cover
+                deduplicated[key] = field.copy_deep()
+        return list(deduplicated.values())
+
+    def _should_generate_separate_models(
+        self,
+        fields: list[DataModelFieldBase],
+        base_classes: list[Reference] | None,
+    ) -> bool:
+        """Determine if Request/Response models should be generated."""
+        if self.read_only_write_only_model_type is None:
+            return False
+        all_fields = self._collect_all_fields_for_request_response(fields, base_classes)
+        return any(field.read_only or field.write_only for field in all_fields)
+
+    def _should_generate_base_model(self, *, generates_separate_models: bool = False) -> bool:
+        """Determine if Base model should be generated."""
+        if self.read_only_write_only_model_type is None:
+            return True
+        if self.read_only_write_only_model_type == ReadOnlyWriteOnlyModelType.All:
+            return True
+        return not generates_separate_models
+
+    def _create_variant_model(  # noqa: PLR0913, PLR0917
+        self,
+        path: list[str],
+        base_name: str,
+        suffix: str,
+        model_fields: list[DataModelFieldBase],
+        obj: JsonSchemaObject,
+        data_model_type_class: type[DataModel],
+    ) -> None:
+        """Create a Request or Response model variant."""
+        if not model_fields:
+            return
+        variant_name = f"{base_name}{suffix}"
+        unique_name = self.model_resolver.get_class_name(variant_name, unique=True).name
+        model_path = [*path[:-1], unique_name]
+        reference = self.model_resolver.add(model_path, unique_name, class_name=True, unique=False, loaded=True)
+        model = self._create_data_model(
+            model_type=data_model_type_class,
+            reference=reference,
+            fields=model_fields,
+            custom_base_class=obj.custom_base_path or self.base_class,
+            custom_template_dir=self.custom_template_dir,
+            extra_template_data=self.extra_template_data,
+            path=self.current_source_path,
+            description=obj.description if self.use_schema_description else None,
+            nullable=obj.type_has_null,
+            keyword_only=self.keyword_only,
+            treat_dot_as_module=self.treat_dot_as_module,
+            dataclass_arguments=self.dataclass_arguments,
+        )
+        self.results.append(model)
+
+    def _create_request_response_models(  # noqa: PLR0913, PLR0917
+        self,
+        name: str,
+        obj: JsonSchemaObject,
+        path: list[str],
+        fields: list[DataModelFieldBase],
+        data_model_type_class: type[DataModel],
+        base_classes: list[Reference] | None = None,
+    ) -> None:
+        """Generate Request and Response model variants."""
+        all_fields = self._collect_all_fields_for_request_response(fields, base_classes)
+
+        # Request model: exclude readOnly fields
+        if any(field.read_only for field in all_fields):
+            self._create_variant_model(
+                path,
+                name,
+                "Request",
+                [field for field in all_fields if not field.read_only],
+                obj,
+                data_model_type_class,
+            )
+        # Response model: exclude writeOnly fields
+        if any(field.write_only for field in all_fields):
+            self._create_variant_model(
+                path,
+                name,
+                "Response",
+                [field for field in all_fields if not field.write_only],
+                obj,
+                data_model_type_class,
+            )
+
+    def get_object_field(  # noqa: PLR0913
+        self,
+        *,
+        field_name: str | None,
+        field: JsonSchemaObject,
+        required: bool,
+        field_type: DataType,
+        alias: str | None,
+        original_field_name: str | None,
+    ) -> DataModelFieldBase:
+        """Create a data model field from a JSON Schema object field."""
+        return self.data_model_field_type(
+            name=field_name,
+            default=field.default,
+            data_type=field_type,
+            required=required,
+            alias=alias,
+            constraints=field.dict() if self.is_constraints_field(field) else None,
+            nullable=field.nullable if self.strict_nullable and (field.has_default or required) else None,
+            strip_default_none=self.strip_default_none,
+            extras=self.get_field_extras(field),
+            use_annotated=self.use_annotated,
+            use_serialize_as_any=self.use_serialize_as_any,
+            use_field_description=self.use_field_description,
+            use_inline_field_description=self.use_inline_field_description,
+            use_default_kwarg=self.use_default_kwarg,
+            original_name=original_field_name,
+            has_default=field.has_default,
+            type_has_null=field.type_has_null,
+            read_only=self._resolve_field_flag(field, "readOnly"),
+            write_only=self._resolve_field_flag(field, "writeOnly"),
+            use_frozen_field=self.use_frozen_field,
+        )
+
+    def get_data_type(self, obj: JsonSchemaObject) -> DataType:
+        """Get the data type for a JSON Schema object."""
+        if obj.type is None:
+            if "const" in obj.extras:
+                return self.data_type_manager.get_data_type_from_value(obj.extras["const"])
+            return self.data_type_manager.get_data_type(
+                Types.any,
+            )
+
+        def _get_data_type(type_: str, format__: str) -> DataType:
+            return self.data_type_manager.get_data_type(
+                self._get_type_with_mappings(type_, format__),
+                **(obj.dict() if not self.field_constraints else {}),
+            )
+
+        if isinstance(obj.type, list):
+            return self.data_type(
+                data_types=[_get_data_type(t, obj.format or "default") for t in obj.type if t != "null"],
+                is_optional="null" in obj.type,
+            )
+        return _get_data_type(obj.type, obj.format or "default")
+
+    def get_ref_data_type(self, ref: str) -> DataType:
+        """Get a data type from a reference string."""
+        reference = self.model_resolver.add_ref(ref)
+        ref_schema = self._load_ref_schema_object(ref)
+        is_optional = (
+            ref_schema.type_has_null or ref_schema.type == "null" or (self.strict_nullable and ref_schema.nullable)
+        )
+        return self.data_type(reference=reference, is_optional=is_optional)
+
+    def set_additional_properties(self, path: str, obj: JsonSchemaObject) -> None:
+        """Set additional properties flag in extra template data."""
+        if isinstance(obj.additionalProperties, bool):
+            self.extra_template_data[path]["additionalProperties"] = obj.additionalProperties
+
+    def set_title(self, path: str, obj: JsonSchemaObject) -> None:
+        """Set title in extra template data."""
+        if obj.title:
+            self.extra_template_data[path]["title"] = obj.title
+
+    def _set_schema_metadata(self, path: str, obj: JsonSchemaObject) -> None:
+        """Set title and additionalProperties in extra template data."""
+        if obj.title:
+            self.extra_template_data[path]["title"] = obj.title
+        if isinstance(obj.additionalProperties, bool):
+            self.extra_template_data[path]["additionalProperties"] = obj.additionalProperties
+
+    def _apply_title_as_name(self, name: str, obj: JsonSchemaObject) -> str:
+        """Apply title as name if use_title_as_name is enabled."""
+        if self.use_title_as_name and obj.title:
+            return sanitize_module_name(obj.title, treat_dot_as_module=self.treat_dot_as_module)
+        return name
+
+    def _should_field_be_required(
+        self,
+        *,
+        in_required_list: bool = True,
+        has_default: bool = False,
+        is_nullable: bool = False,
+    ) -> bool:
+        """Determine if a field should be marked as required."""
+        if self.force_optional_for_required_fields:
+            return False
+        if self.apply_default_values_for_required_fields and has_default:  # pragma: no cover
+            return False
+        if is_nullable:
+            return False
+        return in_required_list
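+    # Precedence of the checks above, as a sketch: force-optional wins first,
+    # then a present default (when apply_default_values_for_required_fields is
+    # set), then nullability; only then does membership in "required" decide.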
+
+    def _deep_merge(self, dict1: dict[Any, Any], dict2: dict[Any, Any]) -> dict[Any, Any]:
+        """Deep merge two dictionaries, combining nested dicts and lists."""
+        result = dict1.copy()
+        for key, value in dict2.items():
+            if key in result:
+                if isinstance(result[key], dict) and isinstance(value, dict):
+                    result[key] = self._deep_merge(result[key], value)
+                    continue
+                if isinstance(result[key], list) and isinstance(value, list):
+                    result[key] = result[key] + value  # noqa: PLR6104
+                    continue
+            result[key] = value
+        return result
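+    # For example:
+    #   _deep_merge({"a": {"x": 1}, "tags": [1]}, {"a": {"y": 2}, "tags": [2]})
+    #   -> {"a": {"x": 1, "y": 2}, "tags": [1, 2]}
+    # Nested dicts merge recursively, lists concatenate, and any other clash
+    # is won by dict2.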
+
+    def _load_ref_schema_object(self, ref: str) -> JsonSchemaObject:
+        """Load a JsonSchemaObject from a $ref using standard resolve/load pipeline."""
+        resolved_ref = self.model_resolver.resolve_ref(ref)
+        file_part, fragment = ([*resolved_ref.split("#", 1), ""])[:2]
+        raw_doc = self._get_ref_body(file_part) if file_part else self.raw_obj
+
+        target_schema: dict[str, YamlValue] | YamlValue = raw_doc
+        if fragment:
+            pointer = [p for p in fragment.split("/") if p]
+            target_schema = get_model_by_path(raw_doc, pointer)
+
+        return self.SCHEMA_OBJECT_TYPE.parse_obj(target_schema)
+
+    def _merge_ref_with_schema(self, obj: JsonSchemaObject) -> JsonSchemaObject:
+        """Merge $ref schema with current schema's additional keywords.
+
+        JSON Schema 2020-12 allows $ref alongside other keywords,
+        which should be merged together.
+
+        Local keywords take precedence over the referenced schema.
+        """
+        if not obj.ref:
+            return obj
+
+        ref_schema = self._load_ref_schema_object(obj.ref)
+        ref_dict = ref_schema.dict(exclude_unset=True, by_alias=True)
+        current_dict = obj.dict(exclude={"ref"}, exclude_unset=True, by_alias=True)
+        merged = self._deep_merge(ref_dict, current_dict)
+        merged.pop("$ref", None)
+
+        return self.SCHEMA_OBJECT_TYPE.parse_obj(merged)
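+    # Sketch: {"$ref": "#/$defs/Name", "description": "override"} loads the
+    # Name schema, deep-merges the local keywords over it (local scalars win),
+    # and drops "$ref" from the merged result before re-parsing.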
+
+    def _merge_primitive_schemas(self, items: list[JsonSchemaObject]) -> JsonSchemaObject:
+        """Merge multiple primitive schemas by computing the intersection of their constraints."""
+        if len(items) == 1:
+            return items[0]
+
+        base_dict: dict[str, Any] = {}
+        for item in items:  # pragma: no branch
+            if item.type:  # pragma: no branch
+                base_dict = item.dict(exclude_unset=True, by_alias=True)
+                break
+
+        for item in items:
+            for field in JsonSchemaObject.__constraint_fields__:
+                value = getattr(item, field, None)
+                if value is None:
+                    value = item.extras.get(field)
+                if value is not None:
+                    if field not in base_dict or base_dict[field] is None:
+                        base_dict[field] = value
+                    else:
+                        base_dict[field] = JsonSchemaParser._intersect_constraint(field, base_dict[field], value)
+
+        return self.SCHEMA_OBJECT_TYPE.parse_obj(base_dict)
+
+    def _merge_primitive_schemas_for_allof(self, items: list[JsonSchemaObject]) -> JsonSchemaObject | None:
+        """Merge primitive schemas for allOf, respecting allof_merge_mode setting."""
+        if len(items) == 1:
+            return items[0]  # pragma: no cover
+
+        formats = {item.format for item in items if item.format}
+        if len(formats) > 1:
+            return None
+
+        merged_format = formats.pop() if formats else None
+
+        if self.allof_merge_mode != AllOfMergeMode.NoMerge:
+            merged = self._merge_primitive_schemas(items)
+            merged_dict = merged.dict(exclude_unset=True, by_alias=True)
+            if merged_format:
+                merged_dict["format"] = merged_format
+            return self.SCHEMA_OBJECT_TYPE.parse_obj(merged_dict)
+
+        base_dict: dict[str, Any] = {}
+        for item in items:
+            if item.type:
+                base_dict = item.dict(exclude_unset=True, by_alias=True)
+                break
+
+        for item in items:
+            for constraint_field in JsonSchemaObject.__constraint_fields__:
+                value = getattr(item, constraint_field, None)
+                if value is None:
+                    value = item.extras.get(constraint_field)
+                if value is not None:
+                    base_dict[constraint_field] = value
+
+        if merged_format:
+            base_dict["format"] = merged_format
+
+        return self.SCHEMA_OBJECT_TYPE.parse_obj(base_dict)
+
+    @staticmethod
+    def _intersect_constraint(field: str, val1: Any, val2: Any) -> Any:  # noqa: PLR0911
+        """Compute the intersection of two constraint values."""
+        v1: float | None = None
+        v2: float | None = None
+        with suppress(TypeError, ValueError):
+            v1 = float(val1) if val1 is not None else None
+            v2 = float(val2) if val2 is not None else None
+
+        if field in {"minLength", "minimum", "exclusiveMinimum", "minItems"}:
+            if v1 is not None and v2 is not None:
+                return val1 if v1 >= v2 else val2
+            return val1  # pragma: no cover
+        if field in {"maxLength", "maximum", "exclusiveMaximum", "maxItems"}:
+            if v1 is not None and v2 is not None:
+                return val1 if v1 <= v2 else val2
+            return val1  # pragma: no cover
+        if field == "pattern":
+            return f"(?={val1})(?={val2})" if val1 != val2 else val1
+        if field == "uniqueItems":
+            return val1 or val2
+        return val1
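+    # Examples of the intersection above:
+    #   ("minimum", 3, 5)             -> 5 (tighter lower bound wins)
+    #   ("maxLength", 10, 8)          -> 8 (tighter upper bound wins)
+    #   ("pattern", "a.*", "b.*")     -> "(?=a.*)(?=b.*)" (both must match)
+    #   ("uniqueItems", False, True)  -> True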
+
+    def _build_allof_type(  # noqa: PLR0911, PLR0912
+        self,
+        allof_items: list[JsonSchemaObject],
+        depth: int,
+        visited: frozenset[int],
+        max_depth: int,
+        max_union_elements: int,
+    ) -> DataType | None:
+        """Build a DataType from allOf schema items."""
+        if len(allof_items) == 1:
+            item = allof_items[0]
+            if item.ref:
+                return self.get_ref_data_type(item.ref)
+            return self._build_lightweight_type(item, depth + 1, visited, max_depth, max_union_elements)
+
+        ref_items: list[JsonSchemaObject] = []
+        primitive_items: list[JsonSchemaObject] = []
+        constraint_only_items: list[JsonSchemaObject] = []
+        object_items: list[JsonSchemaObject] = []
+
+        for item in allof_items:
+            if item.ref:
+                ref_items.append(item)
+            elif item.type and item.type != "object" and not isinstance(item.type, list):
+                primitive_items.append(item)
+            elif item.properties or item.additionalProperties or item.type == "object":
+                object_items.append(item)
+            elif item.allOf or item.anyOf or item.oneOf:
+                nested_type = self._build_lightweight_type(item, depth + 1, visited, max_depth, max_union_elements)
+                if nested_type is None:  # pragma: no cover
+                    return None
+                if nested_type.reference:  # pragma: no cover
+                    ref_items.append(item)
+                else:
+                    primitive_items.append(item)
+            elif item.enum:  # pragma: no cover
+                primitive_items.append(item)
+            elif item.has_constraint:
+                constraint_only_items.append(item)
+
+        if ref_items and not primitive_items and not object_items:
+            ref = ref_items[0].ref
+            if ref:
+                return self.get_ref_data_type(ref)
+            return None  # pragma: no cover
+
+        if ref_items and (primitive_items or object_items or constraint_only_items):
+            ignored_count = len(primitive_items) + len(constraint_only_items)
+            if ignored_count > 0:  # pragma: no branch
+                warn(
+                    f"allOf combines $ref with {ignored_count} constraint(s) that will be ignored "
+                    f"in inherited field type resolution. Consider defining constraints in the referenced schema.",
+                    stacklevel=4,
+                )
+            ref = ref_items[0].ref
+            if ref:
+                return self.get_ref_data_type(ref)
+            return None  # pragma: no cover
+
+        if primitive_items and not object_items:
+            all_primitives = primitive_items + constraint_only_items
+            merged_schema = self._merge_primitive_schemas(all_primitives)
+            return self._build_lightweight_type(merged_schema, depth + 1, visited, max_depth, max_union_elements)
+
+        if object_items:
+            additional_props_types: list[DataType] = []
+
+            for obj_item in object_items:
+                if isinstance(obj_item.additionalProperties, JsonSchemaObject):
+                    ap_type = self._build_lightweight_type(
+                        obj_item.additionalProperties, depth + 1, visited, max_depth, max_union_elements
+                    )
+                    if ap_type:
+                        additional_props_types.append(ap_type)
+
+            if additional_props_types:
+                best_type = additional_props_types[0]
+                for ap_type in additional_props_types[1:]:  # pragma: no branch
+                    is_better = best_type.type == ANY and ap_type.type != ANY
+                    is_better = is_better or (ap_type.reference and not best_type.reference)
+                    if is_better:  # pragma: no cover
+                        best_type = ap_type
+                return self.data_type(data_types=[best_type], is_dict=True)
+
+            return self.data_type(data_types=[DataType(type=ANY, import_=IMPORT_ANY)], is_dict=True)
+
+        return None
+
+    def _build_lightweight_type(  # noqa: PLR0911, PLR0912
+        self,
+        schema: JsonSchemaObject,
+        depth: int = 0,
+        visited: frozenset[int] | None = None,
+        max_depth: int = 3,
+        max_union_elements: int = 5,
+    ) -> DataType | None:
+        """Build a DataType from schema without generating models."""
+        if depth > max_depth:  # pragma: no cover
+            return None
+        if visited is None:
+            visited = frozenset()
+
+        schema_id = id(schema)
+        if schema_id in visited:  # pragma: no cover
+            return None
+        visited |= {schema_id}
+
+        if schema.ref:
+            return self.get_ref_data_type(schema.ref)
+
+        if schema.enum:  # pragma: no cover
+            return self.get_data_type(schema)
+
+        if schema.is_array and schema.items and isinstance(schema.items, JsonSchemaObject):
+            if schema.items.ref:
+                item_type = self.get_ref_data_type(schema.items.ref)
+            else:
+                item_type = self._build_lightweight_type(
+                    schema.items, depth + 1, visited, max_depth, max_union_elements
+                )
+                if item_type is None:  # pragma: no cover
+                    item_type = DataType(type=ANY, import_=IMPORT_ANY)
+            return self.data_type(data_types=[item_type], is_list=True)
+
+        if schema.type and not isinstance(schema.type, list) and schema.type != "object":
+            return self.get_data_type(schema)
+        if isinstance(schema.type, list):
+            return self.get_data_type(schema)
+
+        combined_items = schema.anyOf or schema.oneOf
+        if combined_items:
+            if len(combined_items) > max_union_elements:  # pragma: no cover
+                return None
+            data_types: list[DataType] = []
+            for item in combined_items:
+                if item.ref:  # pragma: no cover
+                    data_types.append(self.get_ref_data_type(item.ref))
+                else:
+                    item_type = self._build_lightweight_type(item, depth + 1, visited, max_depth, max_union_elements)
+                    if item_type is None:  # pragma: no cover
+                        return None
+                    data_types.append(item_type)
+            if len(data_types) == 1:  # pragma: no cover
+                return data_types[0]
+            return self.data_type(data_types=data_types)
+
+        if schema.allOf:  # pragma: no cover
+            return self._build_allof_type(schema.allOf, depth, visited, max_depth, max_union_elements)
+
+        if isinstance(schema.additionalProperties, JsonSchemaObject):  # pragma: no cover
+            value_type = self._build_lightweight_type(
+                schema.additionalProperties, depth + 1, visited, max_depth, max_union_elements
+            )
+            if value_type is None:
+                value_type = DataType(type=ANY, import_=IMPORT_ANY)
+            return self.data_type(data_types=[value_type], is_dict=True)
+
+        if schema.properties or schema.type == "object":
+            return self.data_type(data_types=[DataType(type=ANY, import_=IMPORT_ANY)], is_dict=True)
+
+        return None
+
+    def _is_list_with_any_item_type(self, data_type: DataType | None) -> bool:  # noqa: PLR6301
+        """Return True when data_type represents List[Any] (including nested lists)."""
+        if not data_type:  # pragma: no cover
+            return False
+
+        candidate = data_type
+        if not candidate.is_list and len(candidate.data_types) == 1 and candidate.data_types[0].is_list:
+            candidate = candidate.data_types[0]
+
+        if not candidate.is_list or len(candidate.data_types) != 1:
+            return False
+
+        item_type = candidate.data_types[0]
+        while len(item_type.data_types) == 1:
+            inner = item_type.data_types[0]
+            if (not item_type.is_list and inner.is_list) or item_type.is_list:
+                item_type = inner
+            else:
+                break
+        return item_type.type == ANY
+
+    def _merge_property_schemas(self, parent_dict: dict[str, Any], child_dict: dict[str, Any]) -> dict[str, Any]:
+        """Merge parent and child property schemas for allOf."""
+        if self.allof_merge_mode == AllOfMergeMode.NoMerge:
+            return child_dict.copy()
+
+        non_merged_fields: set[str] = set()
+        if self.allof_merge_mode == AllOfMergeMode.Constraints:
+            non_merged_fields = {"default", "examples", "example"}
+
+        result = {key: value for key, value in parent_dict.items() if key not in non_merged_fields}
+
+        for key, value in child_dict.items():
+            if key in result and isinstance(result[key], dict) and isinstance(value, dict):
+                result[key] = self._merge_property_schemas(result[key], value)
+            else:
+                result[key] = value
+        return result
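+    # Sketch of the merge modes above, with parent {"type": "string",
+    # "minLength": 1, "default": "x"} and child {"maxLength": 5}:
+    #   NoMerge     -> {"maxLength": 5}
+    #   Constraints -> {"type": "string", "minLength": 1, "maxLength": 5}
+    #                  (parent "default"/"examples"/"example" not inherited)
+    # The remaining mode merges everything, keeping "default": "x" as well.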
+
+    def _merge_properties_with_parent_constraints(
+        self, child_obj: JsonSchemaObject, parent_refs: list[str]
+    ) -> JsonSchemaObject:
+        """Merge child properties with parent property constraints for allOf inheritance."""
+        if not child_obj.properties:
+            return child_obj
+
+        parent_properties: dict[str, JsonSchemaObject] = {}
+        for ref in parent_refs:
+            try:
+                parent_schema = self._load_ref_schema_object(ref)
+            except Exception:  # pragma: no cover  # noqa: BLE001, S112
+                continue
+            if parent_schema.properties:
+                for prop_name, prop_schema in parent_schema.properties.items():
+                    if isinstance(prop_schema, JsonSchemaObject) and prop_name not in parent_properties:
+                        parent_properties[prop_name] = prop_schema
+
+        if not parent_properties:
+            return child_obj
+
+        merged_properties: dict[str, JsonSchemaObject | bool] = {}
+        for prop_name, child_prop in child_obj.properties.items():
+            if not isinstance(child_prop, JsonSchemaObject):
+                merged_properties[prop_name] = child_prop
+                continue
+
+            parent_prop = parent_properties.get(prop_name)
+            if parent_prop is None:
+                merged_properties[prop_name] = child_prop
+                continue
+
+            parent_dict = parent_prop.dict(exclude_unset=True, by_alias=True)
+            child_dict = child_prop.dict(exclude_unset=True, by_alias=True)
+            merged_dict = self._merge_property_schemas(parent_dict, child_dict)
+            merged_properties[prop_name] = self.SCHEMA_OBJECT_TYPE.parse_obj(merged_dict)
+
+        merged_obj_dict = child_obj.dict(exclude_unset=True, by_alias=True)
+        merged_obj_dict["properties"] = {
+            k: v.dict(exclude_unset=True, by_alias=True) if isinstance(v, JsonSchemaObject) else v
+            for k, v in merged_properties.items()
+        }
+        return self.SCHEMA_OBJECT_TYPE.parse_obj(merged_obj_dict)
+
+    def _get_inherited_field_type(self, prop_name: str, base_classes: list[Reference]) -> DataType | None:
+        """Get the data type for an inherited property from parent schemas."""
+        for base in base_classes:
+            if not base.path:  # pragma: no cover
+                continue
+            if "#" in base.path:
+                file_part, fragment = base.path.split("#", 1)
+                ref = f"{file_part}#{fragment}" if file_part else f"#{fragment}"
+            else:  # pragma: no cover
+                ref = f"#{base.path}"
+            try:
+                parent_schema = self._load_ref_schema_object(ref)
+            except Exception:  # pragma: no cover  # noqa: BLE001, S112
+                continue
+            if not parent_schema.properties:  # pragma: no cover
+                continue
+            prop_schema = parent_schema.properties.get(prop_name)
+            if not isinstance(prop_schema, JsonSchemaObject):  # pragma: no cover
+                continue
+            result = self._build_lightweight_type(prop_schema)
+            if result is not None:
+                return result
+        return None
+
+    def _schema_signature(self, prop_schema: JsonSchemaObject | bool) -> str | bool:  # noqa: FBT001, PLR6301
+        """Normalize property schema for comparison across allOf items."""
+        if isinstance(prop_schema, bool):
+            return prop_schema
+        return json.dumps(prop_schema.dict(exclude_unset=True, by_alias=True), sort_keys=True, default=repr)
+
+    def _is_root_model_schema(self, obj: JsonSchemaObject) -> bool:  # noqa: PLR6301
+        """Check if schema represents a root model (primitive type with constraints).
+
+        Based on parse_raw_obj() else branch conditions. Returns True when
+        the schema would be processed by parse_root_type().
+        """
+        if obj.is_array:
+            return False
+        if obj.allOf or obj.oneOf or obj.anyOf:
+            return False
+        if obj.properties:
+            return False
+        if obj.patternProperties:
+            return False
+        if obj.type == "object":
+            return False
+        return not obj.enum
+
+    def _handle_allof_root_model_with_constraints(  # noqa: PLR0911, PLR0912
+        self,
+        name: str,
+        obj: JsonSchemaObject,
+        path: list[str],
+    ) -> DataType | None:
+        """Handle allOf that combines a root model $ref with additional constraints.
+
+        This handler generates a root model from a root model reference.
+        Object inheritance (with properties) is handled by the existing
+        _parse_all_of_item() path. It applies only to named schema
+        definitions, not to inline properties.
+        """
+        for path_element in path:
+            if SPECIAL_PATH_MARKER in path_element:
+                return None  # pragma: no cover
+
+        ref_items = [item for item in obj.allOf if item.ref]
+
+        if len(ref_items) != 1:
+            return None
+
+        ref_item = ref_items[0]
+        ref_value = ref_item.ref
+        if ref_value is None:
+            return None  # pragma: no cover
+
+        if ref_item.has_ref_with_schema_keywords:
+            ref_schema = self._merge_ref_with_schema(ref_item)
+        else:
+            ref_schema = self._load_ref_schema_object(ref_value)
+
+        if not self._is_root_model_schema(ref_schema):
+            return None
+
+        constraint_items: list[JsonSchemaObject] = []
+        for item in obj.allOf:
+            if item.ref:
+                continue
+            if item.properties or item.items:
+                return None
+            if item.has_constraint or item.type or item.format:
+                if item.type and ref_schema.type:
+                    compatible_type_pairs = {
+                        ("integer", "number"),
+                        ("number", "integer"),
+                    }
+                    if item.type != ref_schema.type and (item.type, ref_schema.type) not in compatible_type_pairs:
+                        return None
+                constraint_items.append(item)
+
+        if not constraint_items:
+            return None
+
+        all_items = [ref_schema, *constraint_items]
+        merged_schema = self._merge_primitive_schemas_for_allof(all_items)
+        if merged_schema is None:
+            return None
+
+        if obj.description:
+            merged_dict = merged_schema.dict(exclude_unset=True, by_alias=True)
+            merged_dict["description"] = obj.description
+            merged_schema = self.SCHEMA_OBJECT_TYPE.parse_obj(merged_dict)
+
+        return self.parse_root_type(name, merged_schema, path)
+
+    def _merge_all_of_object(self, obj: JsonSchemaObject) -> JsonSchemaObject | None:
+        """Merge allOf items when they share object properties to avoid duplicate models.
+
+        Skip merging when there is exactly one $ref (inheritance with property overrides).
+        Continue merging when multiple $refs share properties to avoid duplicate fields.
+        """
+        ref_count = sum(1 for item in obj.allOf if item.ref)
+        if ref_count == 1:
+            return None
+
+        resolved_items: list[JsonSchemaObject] = []
+        property_signatures: dict[str, set[str | bool]] = {}
+        for item in obj.allOf:
+            resolved_item = self._load_ref_schema_object(item.ref) if item.ref else item
+            resolved_items.append(resolved_item)
+            if resolved_item.properties:
+                for prop_name, prop_schema in resolved_item.properties.items():
+                    property_signatures.setdefault(prop_name, set()).add(self._schema_signature(prop_schema))
+
+        if obj.properties:
+            for prop_name, prop_schema in obj.properties.items():
+                property_signatures.setdefault(prop_name, set()).add(self._schema_signature(prop_schema))
+
+        if not any(len(signatures) > 1 for signatures in property_signatures.values()):
+            return None
+
+        merged_schema: dict[str, Any] = obj.dict(exclude={"allOf"}, exclude_unset=True, by_alias=True)
+        for resolved_item in resolved_items:
+            merged_schema = self._deep_merge(merged_schema, resolved_item.dict(exclude_unset=True, by_alias=True))
+
+        if "required" in merged_schema and isinstance(merged_schema["required"], list):
+            merged_schema["required"] = list(dict.fromkeys(merged_schema["required"]))
+
+        merged_schema.pop("allOf", None)
+        return self.SCHEMA_OBJECT_TYPE.parse_obj(merged_schema)
+
+    def parse_combined_schema(
+        self,
+        name: str,
+        obj: JsonSchemaObject,
+        path: list[str],
+        target_attribute_name: str,
+    ) -> list[DataType]:
+        """Parse combined schema (anyOf, oneOf, allOf) into a list of data types."""
+        base_object = obj.dict(exclude={target_attribute_name}, exclude_unset=True, by_alias=True)
+        combined_schemas: list[JsonSchemaObject] = []
+        refs: list[int] = []
+        for index, target_attribute in enumerate(getattr(obj, target_attribute_name, [])):
+            if target_attribute.ref:
+                if target_attribute.has_ref_with_schema_keywords:
+                    merged_attr = self._merge_ref_with_schema(target_attribute)
+                    combined_schemas.append(
+                        self.SCHEMA_OBJECT_TYPE.parse_obj(
+                            self._deep_merge(base_object, merged_attr.dict(exclude_unset=True, by_alias=True))
+                        )
+                    )
+                else:
+                    combined_schemas.append(target_attribute)
+                    refs.append(index)
+            else:
+                combined_schemas.append(
+                    self.SCHEMA_OBJECT_TYPE.parse_obj(
+                        self._deep_merge(
+                            base_object,
+                            target_attribute.dict(exclude_unset=True, by_alias=True),
+                        )
+                    )
+                )
+
+        parsed_schemas = self.parse_list_item(
+            name,
+            combined_schemas,
+            path,
+            obj,
+            singular_name=False,
+        )
+        common_path_keyword = f"{target_attribute_name}Common"
+        return [
+            self._parse_object_common_part(
+                name,
+                obj,
+                [*get_special_path(common_path_keyword, path), str(i)],
+                ignore_duplicate_model=True,
+                fields=[],
+                base_classes=[d.reference],
+                required=[],
+            )
+            if i in refs and d.reference
+            else d
+            for i, d in enumerate(parsed_schemas)
+        ]
+
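+    # Merge behaviour sketch (comment only, hypothetical `Pet` schema): for
+    #
+    #   description: a pet
+    #   anyOf:
+    #     - type: string
+    #     - $ref: '#/definitions/Pet'
+    #
+    # the sibling keyword `description` is deep-merged into the inline branch,
+    # while the plain `$ref` branch is wrapped via the `anyOfCommon` special
+    # path so the referenced model itself stays untouched and reusable.
+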
+    def parse_any_of(self, name: str, obj: JsonSchemaObject, path: list[str]) -> list[DataType]:
+        """Parse anyOf schema into a list of data types."""
+        return self.parse_combined_schema(name, obj, path, "anyOf")
+
+    def parse_one_of(self, name: str, obj: JsonSchemaObject, path: list[str]) -> list[DataType]:
+        """Parse oneOf schema into a list of data types."""
+        return self.parse_combined_schema(name, obj, path, "oneOf")
+
+    def _create_data_model(self, model_type: type[DataModel] | None = None, **kwargs: Any) -> DataModel:
+        """Create data model instance with dataclass_arguments support for DataClass."""
+        data_model_class = model_type or self.data_model_type
+        if issubclass(data_model_class, DataClass):
+            # Use dataclass_arguments from kwargs, or fall back to self.dataclass_arguments
+            # If both are None, construct from legacy frozen_dataclasses/keyword_only flags
+            dataclass_arguments = kwargs.pop("dataclass_arguments", None)
+            if dataclass_arguments is None:
+                dataclass_arguments = self.dataclass_arguments
+            if dataclass_arguments is None:
+                # Construct from legacy flags for library API compatibility
+                dataclass_arguments = {}
+                if self.frozen_dataclasses:
+                    dataclass_arguments["frozen"] = True
+                if self.keyword_only:
+                    dataclass_arguments["kw_only"] = True
+            kwargs["dataclass_arguments"] = dataclass_arguments
+            kwargs.pop("frozen", None)
+            kwargs.pop("keyword_only", None)
+        else:
+            kwargs.pop("dataclass_arguments", None)
+        return data_model_class(**kwargs)
+
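+    # Precedence sketch (comment only): an explicit `dataclass_arguments`
+    # kwarg wins, then the parser-level `self.dataclass_arguments`, then the
+    # legacy flags. E.g. with `frozen_dataclasses=True`, `keyword_only=True`
+    # and neither setting provided, a DataClass is created with
+    # `dataclass_arguments={"frozen": True, "kw_only": True}`.
+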
+    def _parse_object_common_part(  # noqa: PLR0912, PLR0913, PLR0915
+        self,
+        name: str,
+        obj: JsonSchemaObject,
+        path: list[str],
+        *,
+        ignore_duplicate_model: bool,
+        fields: list[DataModelFieldBase],
+        base_classes: list[Reference],
+        required: list[str],
+    ) -> DataType:
+        if self.read_only_write_only_model_type is not None and obj.properties:
+            for prop in obj.properties.values():
+                if isinstance(prop, JsonSchemaObject) and prop.ref:
+                    self._load_ref_schema_object(prop.ref)
+        if obj.properties:
+            fields.extend(
+                self.parse_object_fields(
+                    obj,
+                    path,
+                    get_module_name(name, None, treat_dot_as_module=self.treat_dot_as_module),
+                    class_name=name,
+                )
+            )
+        if base_classes:
+            for field in fields:
+                current_type = field.data_type
+                field_name = field.original_name or field.name
+                if current_type and current_type.type == ANY and field_name:
+                    inherited_type = self._get_inherited_field_type(field_name, base_classes)
+                    if inherited_type is not None:
+                        if PYDANTIC_V2:
+                            new_type = inherited_type.model_copy(deep=True)
+                        else:
+                            new_type = inherited_type.copy(deep=True)
+                        new_type.is_optional = new_type.is_optional or current_type.is_optional
+                        new_type.is_dict = new_type.is_dict or current_type.is_dict
+                        new_type.is_list = new_type.is_list or current_type.is_list
+                        new_type.is_set = new_type.is_set or current_type.is_set
+                        if new_type.kwargs is None and current_type.kwargs is not None:  # pragma: no cover
+                            new_type.kwargs = current_type.kwargs
+                        field.data_type = new_type
+                # Handle List[Any] case: inherit item type from parent if items have Any type
+                elif field_name and self._is_list_with_any_item_type(current_type):
+                    inherited_type = self._get_inherited_field_type(field_name, base_classes)
+                    if inherited_type is None or not inherited_type.is_list or not inherited_type.data_types:
+                        continue
+
+                    new_type = inherited_type.model_copy(deep=True) if PYDANTIC_V2 else inherited_type.copy(deep=True)
+
+                    # Preserve modifiers coming from the overriding schema.
+                    if current_type is not None:  # pragma: no branch
+                        new_type.is_optional = new_type.is_optional or current_type.is_optional
+                        new_type.is_dict = new_type.is_dict or current_type.is_dict
+                        new_type.is_list = new_type.is_list or current_type.is_list
+                        new_type.is_set = new_type.is_set or current_type.is_set
+                        if new_type.kwargs is None and current_type.kwargs is not None:  # pragma: no cover
+                            new_type.kwargs = current_type.kwargs
+
+                    # Some code paths represent the list type inside an outer container.
+                    is_wrapped = (
+                        current_type is not None
+                        and not current_type.is_list
+                        and len(current_type.data_types) == 1
+                        and current_type.data_types[0].is_list
+                    )
+                    if is_wrapped:
+                        wrapper = current_type.model_copy(deep=True) if PYDANTIC_V2 else current_type.copy(deep=True)
+                        wrapper.data_types[0] = new_type
+                        field.data_type = wrapper
+                        continue
+
+                    field.data_type = new_type  # pragma: no cover
+        # ignore a duplicate model: with no own fields and a single base class, reuse the base reference directly
+        if ignore_duplicate_model and not fields and len(base_classes) == 1:
+            with self.model_resolver.current_base_path_context(self.model_resolver._base_path):  # noqa: SLF001
+                self.model_resolver.delete(path)
+                return self.data_type(reference=base_classes[0])
+        if required:
+            for field in fields:
+                if self.force_optional_for_required_fields or (  # pragma: no cover
+                    self.apply_default_values_for_required_fields and field.has_default
+                ):
+                    continue  # pragma: no cover
+                if (field.original_name or field.name) in required:
+                    field.required = True
+        if obj.required:
+            field_name_to_field = {f.original_name or f.name: f for f in fields}
+            for required_ in obj.required:
+                if required_ in field_name_to_field:
+                    field = field_name_to_field[required_]
+                    if self.force_optional_for_required_fields or (
+                        self.apply_default_values_for_required_fields and field.has_default
+                    ):
+                        continue
+                    field.required = True
+                else:
+                    fields.append(
+                        self.data_model_field_type(required=True, original_name=required_, data_type=DataType())
+                    )
+        name = self._apply_title_as_name(name, obj)  # pragma: no cover
+        reference = self.model_resolver.add(path, name, class_name=True, loaded=True)
+        self.set_additional_properties(reference.path, obj)
+
+        generates_separate = self._should_generate_separate_models(fields, base_classes)
+        if generates_separate:
+            self._create_request_response_models(
+                name=reference.name,
+                obj=obj,
+                path=path,
+                fields=fields,
+                data_model_type_class=self.data_model_type,
+                base_classes=base_classes,
+            )
+
+        # Generate base model if needed
+        if self._should_generate_base_model(generates_separate_models=generates_separate):
+            data_model_type = self._create_data_model(
+                reference=reference,
+                fields=fields,
+                base_classes=base_classes,
+                custom_base_class=obj.custom_base_path or self.base_class,
+                custom_template_dir=self.custom_template_dir,
+                extra_template_data=self.extra_template_data,
+                path=self.current_source_path,
+                description=obj.description if self.use_schema_description else None,
+                keyword_only=self.keyword_only,
+                treat_dot_as_module=self.treat_dot_as_module,
+                dataclass_arguments=self.dataclass_arguments,
+            )
+            self.results.append(data_model_type)
+
+        return self.data_type(reference=reference)
+
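+    # Example of the `Any` inheritance above (comment only): if a child schema
+    # redeclares `name: {}` (untyped) while a base class already types `name`
+    # as `str`, the child field copies the inherited `str` type instead of
+    # degrading to `Any`, keeping any optional/list/dict/set modifiers from
+    # the override.
+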
+    def _parse_all_of_item(  # noqa: PLR0912, PLR0913, PLR0917
+        self,
+        name: str,
+        obj: JsonSchemaObject,
+        path: list[str],
+        fields: list[DataModelFieldBase],
+        base_classes: list[Reference],
+        required: list[str],
+        union_models: list[Reference],
+    ) -> None:
+        parent_refs = [item.ref for item in obj.allOf if item.ref]
+
+        for all_of_item in obj.allOf:  # noqa: PLR1702
+            if all_of_item.ref:  # $ref
+                ref_schema = self._load_ref_schema_object(all_of_item.ref)
+
+                if ref_schema.oneOf or ref_schema.anyOf:
+                    self.model_resolver.add(path, name, class_name=True, loaded=True)
+                    if ref_schema.anyOf:
+                        union_models.extend(
+                            d.reference for d in self.parse_any_of(name, ref_schema, path) if d.reference
+                        )
+                    if ref_schema.oneOf:
+                        union_models.extend(
+                            d.reference for d in self.parse_one_of(name, ref_schema, path) if d.reference
+                        )
+                else:
+                    ref = self.model_resolver.add_ref(all_of_item.ref)
+                    if ref.path not in {b.path for b in base_classes}:
+                        base_classes.append(ref)
+            else:
+                # Merge child properties with parent constraints before processing
+                merged_item = self._merge_properties_with_parent_constraints(all_of_item, parent_refs)
+                module_name = get_module_name(name, None, treat_dot_as_module=self.treat_dot_as_module)
+                object_fields = self.parse_object_fields(
+                    merged_item,
+                    path,
+                    module_name,
+                    class_name=name,
+                )
+
+                if object_fields:
+                    fields.extend(object_fields)
+                    if all_of_item.required:
+                        required.extend(all_of_item.required)
+                        field_names: set[str] = set()
+                        for f in object_fields:
+                            if f.original_name:
+                                field_names.add(f.original_name)
+                            elif f.name:  # pragma: no cover
+                                field_names.add(f.name)
+                        existing_field_names: set[str] = set()
+                        for f in fields:
+                            if f.original_name:
+                                existing_field_names.add(f.original_name)
+                            elif f.name:  # pragma: no cover
+                                existing_field_names.add(f.name)
+                        for request in all_of_item.required:
+                            if request in field_names or request in existing_field_names:
+                                continue
+                            if self.force_optional_for_required_fields:
+                                continue
+                            field_name, alias = self.model_resolver.get_valid_field_name_and_alias(
+                                request,
+                                excludes=existing_field_names,
+                                model_type=self.field_name_model_type,
+                                class_name=name,
+                            )
+                            data_type = self._get_inherited_field_type(request, base_classes)
+                            if data_type is None:
+                                data_type = DataType(type=ANY, import_=IMPORT_ANY)
+                            fields.append(
+                                self.data_model_field_type(
+                                    name=field_name,
+                                    required=True,
+                                    original_name=request,
+                                    alias=alias,
+                                    data_type=data_type,
+                                )
+                            )
+                            existing_field_names.update({request, field_name})
+                elif all_of_item.required:
+                    required.extend(all_of_item.required)
+                self._parse_all_of_item(
+                    name,
+                    all_of_item,
+                    path,
+                    fields,
+                    base_classes,
+                    required,
+                    union_models,
+                )
+                if all_of_item.anyOf:
+                    self.model_resolver.add(path, name, class_name=True, loaded=True)
+                    union_models.extend(d.reference for d in self.parse_any_of(name, all_of_item, path) if d.reference)
+                if all_of_item.oneOf:
+                    self.model_resolver.add(path, name, class_name=True, loaded=True)
+                    union_models.extend(d.reference for d in self.parse_one_of(name, all_of_item, path) if d.reference)
+
+    def parse_all_of(
+        self,
+        name: str,
+        obj: JsonSchemaObject,
+        path: list[str],
+        ignore_duplicate_model: bool = False,  # noqa: FBT001, FBT002
+    ) -> DataType:
+        """Parse allOf schema into a single data type with combined properties."""
+        if len(obj.allOf) == 1 and not obj.properties:
+            single_obj = obj.allOf[0]
+            if (
+                single_obj.ref
+                and single_obj.ref_type == JSONReference.LOCAL
+                and get_model_by_path(self.raw_obj, single_obj.ref[2:].split("/")).get("enum")
+            ):
+                ref_data_type = self.get_ref_data_type(single_obj.ref)
+
+                full_path = self.model_resolver.join_path(path)
+                existing_ref = self.model_resolver.references.get(full_path)
+                if existing_ref is not None and not existing_ref.loaded:
+                    reference = self.model_resolver.add(path, name, class_name=True, loaded=True)
+                    field = self.data_model_field_type(
+                        name=None,
+                        data_type=ref_data_type,
+                        required=True,
+                    )
+                    data_model_root = self.data_model_root_type(
+                        reference=reference,
+                        fields=[field],
+                        custom_base_class=obj.custom_base_path or self.base_class,
+                        custom_template_dir=self.custom_template_dir,
+                        extra_template_data=self.extra_template_data,
+                        path=self.current_source_path,
+                        description=obj.description if self.use_schema_description else None,
+                        nullable=obj.type_has_null,
+                        treat_dot_as_module=self.treat_dot_as_module,
+                    )
+                    self.results.append(data_model_root)
+                    return self.data_type(reference=reference)
+
+                return ref_data_type
+
+        merged_all_of_obj = self._merge_all_of_object(obj)
+        if merged_all_of_obj:
+            return self._parse_object_common_part(
+                name,
+                merged_all_of_obj,
+                path,
+                ignore_duplicate_model=ignore_duplicate_model,
+                fields=[],
+                base_classes=[],
+                required=[],
+            )
+
+        root_model_result = self._handle_allof_root_model_with_constraints(name, obj, path)
+        if root_model_result is not None:
+            return root_model_result
+
+        fields: list[DataModelFieldBase] = []
+        base_classes: list[Reference] = []
+        required: list[str] = []
+        union_models: list[Reference] = []
+        self._parse_all_of_item(name, obj, path, fields, base_classes, required, union_models)
+        if not union_models:
+            return self._parse_object_common_part(
+                name,
+                obj,
+                path,
+                ignore_duplicate_model=ignore_duplicate_model,
+                fields=fields,
+                base_classes=base_classes,
+                required=required,
+            )
+        reference = self.model_resolver.add(path, name, class_name=True, loaded=True)
+        all_of_data_type = self._parse_object_common_part(
+            name,
+            obj,
+            get_special_path("allOf", path),
+            ignore_duplicate_model=ignore_duplicate_model,
+            fields=fields,
+            base_classes=base_classes,
+            required=required,
+        )
+        assert all_of_data_type.reference is not None
+        data_type = self.data_type(
+            data_types=[
+                self._parse_object_common_part(
+                    name,
+                    obj,
+                    get_special_path(f"union_model-{index}", path),
+                    ignore_duplicate_model=ignore_duplicate_model,
+                    fields=[],
+                    base_classes=[union_model, all_of_data_type.reference],
+                    required=[],
+                )
+                for index, union_model in enumerate(union_models)
+            ]
+        )
+        field = self.get_object_field(
+            field_name=None,
+            field=obj,
+            required=True,
+            field_type=data_type,
+            alias=None,
+            original_field_name=None,
+        )
+        data_model_root = self.data_model_root_type(
+            reference=reference,
+            fields=[field],
+            custom_base_class=obj.custom_base_path or self.base_class,
+            custom_template_dir=self.custom_template_dir,
+            extra_template_data=self.extra_template_data,
+            path=self.current_source_path,
+            description=obj.description if self.use_schema_description else None,
+            nullable=obj.type_has_null,
+            treat_dot_as_module=self.treat_dot_as_module,
+        )
+        self.results.append(data_model_root)
+        return self.data_type(reference=reference)
+
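+    # Typical outcome (comment only, hypothetical `Cat`/`Pet` schemas): for
+    #
+    #   Cat:
+    #     allOf:
+    #       - $ref: '#/definitions/Pet'
+    #       - type: object
+    #         properties:
+    #           meow: {type: string}
+    #
+    # `Pet` becomes a base class and `meow` an own field, i.e. roughly
+    # `class Cat(Pet): meow: Optional[str] = None`. Refs pointing at
+    # oneOf/anyOf targets are collected as union models and emitted through a
+    # root model instead.
+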
+    def parse_object_fields(
+        self,
+        obj: JsonSchemaObject,
+        path: list[str],
+        module_name: Optional[str] = None,  # noqa: UP045
+        class_name: Optional[str] = None,  # noqa: UP045
+    ) -> list[DataModelFieldBase]:
+        """Parse object properties into a list of data model fields."""
+        properties: dict[str, JsonSchemaObject | bool] = {} if obj.properties is None else obj.properties
+        requires: set[str] = set() if obj.required is None else {*obj.required}
+        fields: list[DataModelFieldBase] = []
+
+        exclude_field_names: set[str] = set()
+        for original_field_name, field in properties.items():
+            field_name, alias = self.model_resolver.get_valid_field_name_and_alias(
+                original_field_name,
+                excludes=exclude_field_names,
+                model_type=self.field_name_model_type,
+                class_name=class_name,
+            )
+            modular_name = f"{module_name}.{field_name}" if module_name else field_name
+
+            exclude_field_names.add(field_name)
+
+            if isinstance(field, bool):
+                fields.append(
+                    self.data_model_field_type(
+                        name=field_name,
+                        data_type=self.data_type_manager.get_data_type(
+                            Types.any,
+                        ),
+                        required=False if self.force_optional_for_required_fields else original_field_name in requires,
+                        alias=alias,
+                        strip_default_none=self.strip_default_none,
+                        use_annotated=self.use_annotated,
+                        use_field_description=self.use_field_description,
+                        use_inline_field_description=self.use_inline_field_description,
+                        original_name=original_field_name,
+                    )
+                )
+                continue
+
+            field_type = self.parse_item(modular_name, field, [*path, field_name])
+
+            if self.force_optional_for_required_fields or (
+                self.apply_default_values_for_required_fields and field.has_default
+            ):
+                required: bool = False
+            else:
+                required = original_field_name in requires
+            fields.append(
+                self.get_object_field(
+                    field_name=field_name,
+                    field=field,
+                    required=required,
+                    field_type=field_type,
+                    alias=alias,
+                    original_field_name=original_field_name,
+                )
+            )
+        return fields
+
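+    # Field resolution sketch (comment only): a property listed in `required`
+    # yields `required=True` unless `force_optional_for_required_fields` is
+    # set, or `apply_default_values_for_required_fields` is set and the field
+    # carries a default. A boolean property schema (`"foo": true`) maps to an
+    # `Any`-typed field.
+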
+    def parse_object(
+        self,
+        name: str,
+        obj: JsonSchemaObject,
+        path: list[str],
+        singular_name: bool = False,  # noqa: FBT001, FBT002
+        unique: bool = True,  # noqa: FBT001, FBT002
+    ) -> DataType:
+        """Parse object schema into a data model."""
+        if not unique:  # pragma: no cover
+            warn(
+                f"{self.__class__.__name__}.parse_object() ignore `unique` argument."
+                f"An object name must be unique."
+                f"This argument will be removed in a future version",
+                stacklevel=2,
+            )
+        name = self._apply_title_as_name(name, obj)
+        reference = self.model_resolver.add(
+            path,
+            name,
+            class_name=True,
+            singular_name=singular_name,
+            loaded=True,
+        )
+        class_name = reference.name
+        self.set_title(reference.path, obj)
+        if self.read_only_write_only_model_type is not None and obj.properties:
+            for prop in obj.properties.values():
+                if isinstance(prop, JsonSchemaObject) and prop.ref:
+                    self._load_ref_schema_object(prop.ref)
+        fields = self.parse_object_fields(
+            obj,
+            path,
+            get_module_name(class_name, None, treat_dot_as_module=self.treat_dot_as_module),
+            class_name=class_name,
+        )
+        if fields or not isinstance(obj.additionalProperties, JsonSchemaObject):
+            data_model_type_class = self.data_model_type
+        else:
+            fields.append(
+                self.get_object_field(
+                    field_name=None,
+                    field=obj.additionalProperties,
+                    required=True,
+                    original_field_name=None,
+                    field_type=self.data_type(
+                        data_types=[
+                            self.parse_item(
+                                # TODO: Improve naming for nested ClassName
+                                name,
+                                obj.additionalProperties,
+                                [*path, "additionalProperties"],
+                            )
+                        ],
+                        is_dict=True,
+                    ),
+                    alias=None,
+                )
+            )
+            data_model_type_class = self.data_model_root_type
+
+        self.set_additional_properties(reference.path, obj)
+
+        generates_separate = self._should_generate_separate_models(fields, None)
+        if generates_separate:
+            self._create_request_response_models(
+                name=class_name,
+                obj=obj,
+                path=path,
+                fields=fields,
+                data_model_type_class=data_model_type_class,
+            )
+
+        # Generate base model if needed
+        if self._should_generate_base_model(generates_separate_models=generates_separate):
+            data_model_type = self._create_data_model(
+                model_type=data_model_type_class,
+                reference=reference,
+                fields=fields,
+                custom_base_class=obj.custom_base_path or self.base_class,
+                custom_template_dir=self.custom_template_dir,
+                extra_template_data=self.extra_template_data,
+                path=self.current_source_path,
+                description=obj.description if self.use_schema_description else None,
+                nullable=obj.type_has_null,
+                keyword_only=self.keyword_only,
+                treat_dot_as_module=self.treat_dot_as_module,
+                dataclass_arguments=self.dataclass_arguments,
+            )
+            self.results.append(data_model_type)
+
+        return self.data_type(reference=reference)
+
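+    # Example (comment only): an object with no `properties` but
+    #
+    #   additionalProperties: {type: string}
+    #
+    # is emitted as a root model whose single field is a `Dict[str, str]`-like
+    # type, via `data_model_root_type`, rather than as an empty class.
+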
+    def parse_pattern_properties(
+        self,
+        name: str,
+        pattern_properties: dict[str, JsonSchemaObject],
+        path: list[str],
+    ) -> DataType:
+        """Parse patternProperties into a dict data type with regex keys."""
+        return self.data_type(
+            data_types=[
+                self.data_type(
+                    data_types=[
+                        self.parse_item(
+                            name,
+                            kv[1],
+                            get_special_path(f"patternProperties/{i}", path),
+                        )
+                    ],
+                    is_dict=True,
+                    dict_key=self.data_type_manager.get_data_type(
+                        Types.string,
+                        pattern=kv[0] if not self.field_constraints else None,
+                    ),
+                )
+                for i, kv in enumerate(pattern_properties.items())
+            ],
+        )
+
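+    # Example (comment only): with
+    #
+    #   patternProperties:
+    #     '^x-': {type: integer}
+    #
+    # the result is a dict type keyed by a pattern-constrained string, roughly
+    # `Dict[constr(pattern=r'^x-'), int]` under pydantic; when
+    # `field_constraints` is enabled, the pattern is not embedded in the key
+    # type.
+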
+    def parse_item(  # noqa: PLR0911, PLR0912
+        self,
+        name: str,
+        item: JsonSchemaObject,
+        path: list[str],
+        singular_name: bool = False,  # noqa: FBT001, FBT002
+        parent: JsonSchemaObject | None = None,
+    ) -> DataType:
+        """Parse a single JSON Schema item into a data type."""
+        if self.use_title_as_name and item.title:
+            name = sanitize_module_name(item.title, treat_dot_as_module=self.treat_dot_as_module)
+            singular_name = False
+        if parent and not item.enum and item.has_constraint and (parent.has_constraint or self.field_constraints):
+            root_type_path = get_special_path("array", path)
+            return self.parse_root_type(
+                self.model_resolver.add(
+                    root_type_path,
+                    name,
+                    class_name=True,
+                    singular_name=singular_name,
+                ).name,
+                item,
+                root_type_path,
+            )
+        if item.has_ref_with_schema_keywords:
+            item = self._merge_ref_with_schema(item)
+        if item.ref:
+            return self.get_ref_data_type(item.ref)
+        if item.custom_type_path:  # pragma: no cover
+            return self.data_type_manager.get_data_type_from_full_path(item.custom_type_path, is_custom_type=True)
+        if item.is_array:
+            return self.parse_array_fields(name, item, get_special_path("array", path)).data_type
+        if item.discriminator and parent and parent.is_array and (item.oneOf or item.anyOf):
+            return self.parse_root_type(name, item, path)
+        if item.anyOf:
+            const_enum_data = self._extract_const_enum_from_combined(item.anyOf, item.type)
+            if const_enum_data is not None:
+                enum_values, varnames, enum_type, nullable = const_enum_data
+                synthetic_obj = self._create_synthetic_enum_obj(item, enum_values, varnames, enum_type, nullable)
+                if self.should_parse_enum_as_literal(synthetic_obj):
+                    return self.parse_enum_as_literal(synthetic_obj)
+                return self.parse_enum(name, synthetic_obj, get_special_path("enum", path), singular_name=singular_name)
+            return self.data_type(data_types=self.parse_any_of(name, item, get_special_path("anyOf", path)))
+        if item.oneOf:
+            const_enum_data = self._extract_const_enum_from_combined(item.oneOf, item.type)
+            if const_enum_data is not None:
+                enum_values, varnames, enum_type, nullable = const_enum_data
+                synthetic_obj = self._create_synthetic_enum_obj(item, enum_values, varnames, enum_type, nullable)
+                if self.should_parse_enum_as_literal(synthetic_obj):
+                    return self.parse_enum_as_literal(synthetic_obj)
+                return self.parse_enum(name, synthetic_obj, get_special_path("enum", path), singular_name=singular_name)
+            return self.data_type(data_types=self.parse_one_of(name, item, get_special_path("oneOf", path)))
+        if item.allOf:
+            all_of_path = get_special_path("allOf", path)
+            all_of_path = [self.model_resolver.resolve_ref(all_of_path)]
+            return self.parse_all_of(
+                self.model_resolver.add(all_of_path, name, singular_name=singular_name, class_name=True).name,
+                item,
+                all_of_path,
+                ignore_duplicate_model=True,
+            )
+        if item.is_object or item.patternProperties:
+            object_path = get_special_path("object", path)
+            if item.properties:
+                if item.has_multiple_types and isinstance(item.type, list):
+                    data_types: list[DataType] = []
+                    data_types.append(self.parse_object(name, item, object_path, singular_name=singular_name))
+                    data_types.extend(
+                        self.data_type_manager.get_data_type(
+                            self._get_type_with_mappings(t, item.format or "default"),
+                        )
+                        for t in item.type
+                        if t not in {"object", "null"}
+                    )
+                    return self.data_type(data_types=data_types)
+                return self.parse_object(name, item, object_path, singular_name=singular_name)
+            if item.patternProperties:
+                # Supports only a single-key dict.
+                return self.parse_pattern_properties(name, item.patternProperties, object_path)
+            if isinstance(item.additionalProperties, JsonSchemaObject):
+                return self.data_type(
+                    data_types=[self.parse_item(name, item.additionalProperties, object_path)],
+                    is_dict=True,
+                )
+            return self.data_type_manager.get_data_type(
+                Types.object,
+            )
+        if item.enum:
+            if self.should_parse_enum_as_literal(item):
+                return self.parse_enum_as_literal(item)
+            return self.parse_enum(name, item, get_special_path("enum", path), singular_name=singular_name)
+        return self.get_data_type(item)
+
+    def parse_list_item(
+        self,
+        name: str,
+        target_items: list[JsonSchemaObject],
+        path: list[str],
+        parent: JsonSchemaObject,
+        singular_name: bool = True,  # noqa: FBT001, FBT002
+    ) -> list[DataType]:
+        """Parse a list of items into data types."""
+        return [
+            self.parse_item(
+                name,
+                item,
+                [*path, str(index)],
+                singular_name=singular_name,
+                parent=parent,
+            )
+            for index, item in enumerate(target_items)
+        ]
+
+    def parse_array_fields(
+        self,
+        name: str,
+        obj: JsonSchemaObject,
+        path: list[str],
+        singular_name: bool = True,  # noqa: FBT001, FBT002
+    ) -> DataModelFieldBase:
+        """Parse array schema into a data model field with list type."""
+        if self.force_optional_for_required_fields:
+            required: bool = False
+            nullable: Optional[bool] = None  # noqa: UP045
+        else:
+            required = not (obj.has_default and self.apply_default_values_for_required_fields)
+            if self.strict_nullable:
+                nullable = obj.nullable if obj.has_default or required else True
+            else:
+                required = not obj.nullable and required
+                nullable = None
+        if isinstance(obj.items, JsonSchemaObject):
+            items: list[JsonSchemaObject] = [obj.items]
+        elif isinstance(obj.items, list):
+            items = obj.items
+        else:
+            items = []
+
+        if items:
+            item_data_types = self.parse_list_item(
+                name,
+                items,
+                path,
+                obj,
+                singular_name=singular_name,
+            )
+        else:
+            item_data_types = [self.data_type_manager.get_data_type(Types.any)]
+
+        data_types: list[DataType] = [
+            self.data_type(
+                data_types=item_data_types,
+                is_list=True,
+            )
+        ]
+        # TODO: decide special path word for a combined data model.
+        if obj.allOf:
+            data_types.append(self.parse_all_of(name, obj, get_special_path("allOf", path)))
+        elif obj.is_object:
+            data_types.append(self.parse_object(name, obj, get_special_path("object", path)))
+        if obj.enum:
+            data_types.append(self.parse_enum(name, obj, get_special_path("enum", path)))
+        return self.data_model_field_type(
+            data_type=self.data_type(data_types=data_types),
+            default=obj.default,
+            required=required,
+            constraints=obj.dict(),
+            nullable=nullable,
+            strip_default_none=self.strip_default_none,
+            extras=self.get_field_extras(obj),
+            use_annotated=self.use_annotated,
+            use_serialize_as_any=self.use_serialize_as_any,
+            use_field_description=self.use_field_description,
+            use_inline_field_description=self.use_inline_field_description,
+            original_name=None,
+            has_default=obj.has_default,
+        )
+
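+    # Nullability sketch (comment only): with `strict_nullable`, `nullable`
+    # mirrors the schema unless the field is optional and has no default, in
+    # which case it is forced nullable; without `strict_nullable`, a nullable
+    # array simply becomes non-required and `nullable` stays unset.
+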
+    def parse_array(
+        self,
+        name: str,
+        obj: JsonSchemaObject,
+        path: list[str],
+        original_name: str | None = None,
+    ) -> DataType:
+        """Parse array schema into a root model with array type."""
+        name = self._apply_title_as_name(name, obj)
+        reference = self.model_resolver.add(path, name, loaded=True, class_name=True)
+        field = self.parse_array_fields(original_name or name, obj, [*path, name])
+
+        if reference in [d.reference for d in field.data_type.all_data_types if d.reference]:
+            # self-reference
+            field = self.data_model_field_type(
+                data_type=self.data_type(
+                    data_types=[
+                        self.data_type(data_types=field.data_type.data_types[1:], is_list=True),
+                        *field.data_type.data_types[1:],
+                    ]
+                ),
+                default=field.default,
+                required=field.required,
+                constraints=field.constraints,
+                nullable=field.nullable,
+                strip_default_none=field.strip_default_none,
+                extras=field.extras,
+                use_annotated=self.use_annotated,
+                use_field_description=self.use_field_description,
+                use_inline_field_description=self.use_inline_field_description,
+                original_name=None,
+                has_default=field.has_default,
+            )
+
+        data_model_root = self.data_model_root_type(
+            reference=reference,
+            fields=[field],
+            custom_base_class=obj.custom_base_path or self.base_class,
+            custom_template_dir=self.custom_template_dir,
+            extra_template_data=self.extra_template_data,
+            path=self.current_source_path,
+            description=obj.description if self.use_schema_description else None,
+            nullable=obj.type_has_null,
+            treat_dot_as_module=self.treat_dot_as_module,
+        )
+        self.results.append(data_model_root)
+        return self.data_type(reference=reference)
+
+    def parse_root_type(  # noqa: PLR0912
+        self,
+        name: str,
+        obj: JsonSchemaObject,
+        path: list[str],
+    ) -> DataType:
+        """Parse a root-level type into a root model."""
+        reference: Reference | None = None
+        if obj.ref:
+            data_type: DataType = self.get_ref_data_type(obj.ref)
+        elif obj.custom_type_path:
+            data_type = self.data_type_manager.get_data_type_from_full_path(
+                obj.custom_type_path, is_custom_type=True
+            )  # pragma: no cover
+        elif obj.is_array:
+            data_type = self.parse_array_fields(
+                name, obj, get_special_path("array", path)
+            ).data_type  # pragma: no cover
+        elif obj.anyOf or obj.oneOf:
+            combined_items = obj.anyOf or obj.oneOf
+            const_enum_data = self._extract_const_enum_from_combined(combined_items, obj.type)
+            if const_enum_data is not None:  # pragma: no cover
+                enum_values, varnames, enum_type, nullable = const_enum_data
+                synthetic_obj = self._create_synthetic_enum_obj(obj, enum_values, varnames, enum_type, nullable)
+                if self.should_parse_enum_as_literal(synthetic_obj):
+                    data_type = self.parse_enum_as_literal(synthetic_obj)
+                else:
+                    data_type = self.parse_enum(name, synthetic_obj, path)
+            else:
+                reference = self.model_resolver.add(path, name, loaded=True, class_name=True)
+                if obj.anyOf:
+                    data_types: list[DataType] = self.parse_any_of(name, obj, get_special_path("anyOf", path))
+                else:
+                    data_types = self.parse_one_of(name, obj, get_special_path("oneOf", path))
+
+                if len(data_types) > 1:  # pragma: no cover
+                    data_type = self.data_type(data_types=data_types)
+                elif not data_types:  # pragma: no cover
+                    return EmptyDataType()
+                else:  # pragma: no cover
+                    data_type = data_types[0]
+        elif obj.patternProperties:
+            data_type = self.parse_pattern_properties(name, obj.patternProperties, path)
+        elif obj.enum:
+            if self.should_parse_enum_as_literal(obj):
+                data_type = self.parse_enum_as_literal(obj)
+            else:  # pragma: no cover
+                data_type = self.parse_enum(name, obj, path)
+        elif obj.type:
+            data_type = self.get_data_type(obj)
+        else:
+            data_type = self.data_type_manager.get_data_type(
+                Types.any,
+            )
+        required = self._should_field_be_required(
+            has_default=obj.has_default,
+            is_nullable=bool(obj.nullable),
+        )
+        name = self._apply_title_as_name(name, obj)
+        if not reference:
+            reference = self.model_resolver.add(path, name, loaded=True, class_name=True)
+        self._set_schema_metadata(reference.path, obj)
+        data_model_root_type = self.data_model_root_type(
+            reference=reference,
+            fields=[
+                self.data_model_field_type(
+                    data_type=data_type,
+                    default=obj.default,
+                    required=required,
+                    constraints=obj.dict() if self.field_constraints else {},
+                    nullable=obj.nullable if self.strict_nullable else None,
+                    strip_default_none=self.strip_default_none,
+                    extras=self.get_field_extras(obj),
+                    use_annotated=self.use_annotated,
+                    use_field_description=self.use_field_description,
+                    use_inline_field_description=self.use_inline_field_description,
+                    original_name=None,
+                    has_default=obj.has_default,
+                )
+            ],
+            custom_base_class=obj.custom_base_path or self.base_class,
+            custom_template_dir=self.custom_template_dir,
+            extra_template_data=self.extra_template_data,
+            path=self.current_source_path,
+            nullable=obj.type_has_null,
+            treat_dot_as_module=self.treat_dot_as_module,
+            default=obj.default if obj.has_default else UNDEFINED,
+        )
+        self.results.append(data_model_root_type)
+        return self.data_type(reference=reference)
+
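+    # Example (comment only, hypothetical schema): a top-level
+    #
+    #   MaxLengthString:
+    #     type: string
+    #     maxLength: 5
+    #
+    # becomes a root model with a single unnamed field, rendered roughly as
+    # `class MaxLengthString(BaseModel): __root__: constr(max_length=5)`
+    # under pydantic v1 defaults.
+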
+    def _parse_multiple_types_with_properties(
+        self,
+        name: str,
+        obj: JsonSchemaObject,
+        type_list: list[str],
+        path: list[str],
+    ) -> None:
+        """Parse a schema with multiple types including object with properties."""
+        data_types: list[DataType] = []
+
+        object_path = get_special_path("object", path)
+        object_data_type = self.parse_object(name, obj, object_path)
+        data_types.append(object_data_type)
+
+        data_types.extend(
+            self.data_type_manager.get_data_type(
+                self._get_type_with_mappings(t, obj.format or "default"),
+            )
+            for t in type_list
+            if t not in {"object", "null"}
+        )
+
+        is_nullable = obj.nullable or obj.type_has_null
+        required = self._should_field_be_required(
+            has_default=obj.has_default,
+            is_nullable=bool(is_nullable),
+        )
+
+        reference = self.model_resolver.add(path, name, loaded=True, class_name=True)
+        self._set_schema_metadata(reference.path, obj)
+
+        data_model_root_type = self.data_model_root_type(
+            reference=reference,
+            fields=[
+                self.data_model_field_type(
+                    data_type=self.data_type(data_types=data_types),
+                    default=obj.default,
+                    required=required,
+                    constraints=obj.dict() if self.field_constraints else {},
+                    nullable=obj.type_has_null if self.strict_nullable else None,
+                    strip_default_none=self.strip_default_none,
+                    extras=self.get_field_extras(obj),
+                    use_annotated=self.use_annotated,
+                    use_field_description=self.use_field_description,
+                    use_inline_field_description=self.use_inline_field_description,
+                    original_name=None,
+                    has_default=obj.has_default,
+                )
+            ],
+            custom_base_class=obj.custom_base_path or self.base_class,
+            custom_template_dir=self.custom_template_dir,
+            extra_template_data=self.extra_template_data,
+            path=self.current_source_path,
+            nullable=obj.type_has_null,
+            treat_dot_as_module=self.treat_dot_as_module,
+            default=obj.default if obj.has_default else UNDEFINED,
+        )
+        self.results.append(data_model_root_type)
+
+    def parse_enum_as_literal(self, obj: JsonSchemaObject) -> DataType:
+        """Parse enum values as a Literal type."""
+        return self.data_type(literals=[i for i in obj.enum if i is not None])
+
+    @classmethod
+    def _get_field_name_from_dict_enum(cls, enum_part: dict[str, Any], index: int) -> str:
+        """Extract field name from dict enum value using title, name, or const keys."""
+        if enum_part.get("title"):
+            return str(enum_part["title"])
+        if enum_part.get("name"):
+            return str(enum_part["name"])
+        if "const" in enum_part:
+            return str(enum_part["const"])
+        return f"value_{index}"
+
+    def parse_enum(
+        self,
+        name: str,
+        obj: JsonSchemaObject,
+        path: list[str],
+        singular_name: bool = False,  # noqa: FBT001, FBT002
+        unique: bool = True,  # noqa: FBT001, FBT002
+    ) -> DataType:
+        """Parse enum schema into an Enum class."""
+        if not unique:  # pragma: no cover
+            warn(
+                f"{self.__class__.__name__}.parse_enum() ignore `unique` argument."
+                f"An object name must be unique."
+                f"This argument will be removed in a future version",
+                stacklevel=2,
+            )
+        enum_fields: list[DataModelFieldBase] = []
+
+        if None in obj.enum and obj.type == "string":
+            # Nullable enum members are valid only in OpenAPI
+            nullable: bool = True
+            enum_items = [e for e in obj.enum if e is not None]
+        else:
+            enum_items = obj.enum
+            nullable = False
+
+        exclude_field_names: set[str] = set()
+
+        enum_names = obj.x_enum_varnames or obj.x_enum_names
+
+        for i, enum_part in enumerate(enum_items):
+            if obj.type == "string" or isinstance(enum_part, str):
+                default = f"'{enum_part.translate(escape_characters)}'" if isinstance(enum_part, str) else enum_part
+                field_name = enum_names[i] if enum_names and i < len(enum_names) and enum_names[i] else str(enum_part)
+            else:
+                default = enum_part
+                if enum_names and i < len(enum_names) and enum_names[i]:
+                    field_name = enum_names[i]
+                elif isinstance(enum_part, dict):
+                    field_name = self._get_field_name_from_dict_enum(enum_part, i)
+                else:
+                    prefix = obj.type if isinstance(obj.type, str) else type(enum_part).__name__
+                    field_name = f"{prefix}_{enum_part}"
+            field_name = self.model_resolver.get_valid_field_name(
+                field_name, excludes=exclude_field_names, model_type=ModelType.ENUM
+            )
+            exclude_field_names.add(field_name)
+            enum_fields.append(
+                self.data_model_field_type(
+                    name=field_name,
+                    default=default,
+                    data_type=self.data_type_manager.get_data_type(
+                        Types.any,
+                    ),
+                    required=True,
+                    strip_default_none=self.strip_default_none,
+                    has_default=obj.has_default,
+                    use_field_description=self.use_field_description,
+                    use_inline_field_description=self.use_inline_field_description,
+                    original_name=None,
+                )
+            )
+
+        if not enum_fields:
+            if not nullable:
+                return self.data_type_manager.get_data_type(Types.null)
+            name = self._apply_title_as_name(name, obj)
+            reference = self.model_resolver.add(
+                path,
+                name,
+                class_name=True,
+                singular_name=singular_name,
+                singular_name_suffix="Enum",
+                loaded=True,
+            )
+            data_model_root_type = self.data_model_root_type(
+                reference=reference,
+                fields=[
+                    self.data_model_field_type(
+                        data_type=self.data_type_manager.get_data_type(Types.null),
+                        default=obj.default,
+                        required=False,
+                        nullable=True,
+                        strip_default_none=self.strip_default_none,
+                        extras=self.get_field_extras(obj),
+                        use_annotated=self.use_annotated,
+                        has_default=obj.has_default,
+                        use_field_description=self.use_field_description,
+                        use_inline_field_description=self.use_inline_field_description,
+                        original_name=None,
+                    )
+                ],
+                custom_base_class=obj.custom_base_path or self.base_class,
+                custom_template_dir=self.custom_template_dir,
+                extra_template_data=self.extra_template_data,
+                path=self.current_source_path,
+                default=obj.default if obj.has_default else UNDEFINED,
+                nullable=obj.type_has_null,
+                treat_dot_as_module=self.treat_dot_as_module,
+            )
+            self.results.append(data_model_root_type)
+            return self.data_type(reference=reference)
+
+        def create_enum(reference_: Reference) -> DataType:
+            type_: Types | None = (
+                self._get_type_with_mappings(obj.type, obj.format) if isinstance(obj.type, str) else None
+            )
+
+            enum_cls: type[Enum] = Enum
+            if (
+                self.use_specialized_enum
+                and type_
+                and (specialized_type := SPECIALIZED_ENUM_TYPE_MATCH.get(type_))
+                # StrEnum is available only in Python 3.11+
+                and (specialized_type != StrEnum or self.target_python_version.has_strenum)
+            ):
+                # If specialized enum is available in the target Python version,
+                # use it and ignore `self.use_subclass_enum` setting.
+                type_ = None
+                enum_cls = specialized_type
+
+            enum = enum_cls(
+                reference=reference_,
+                fields=enum_fields,
+                path=self.current_source_path,
+                description=obj.description if self.use_schema_description else None,
+                custom_template_dir=self.custom_template_dir,
+                type_=type_ if self.use_subclass_enum else None,
+                default=obj.default if obj.has_default else UNDEFINED,
+                treat_dot_as_module=self.treat_dot_as_module,
+            )
+            self.results.append(enum)
+            return self.data_type(reference=reference_)
+
+        name = self._apply_title_as_name(name, obj)
+        reference = self.model_resolver.add(
+            path,
+            name,
+            class_name=True,
+            singular_name=singular_name,
+            singular_name_suffix="Enum",
+            loaded=True,
+        )
+
+        if not nullable:
+            return create_enum(reference)
+
+        enum_reference = self.model_resolver.add(
+            [*path, "Enum"],
+            f"{reference.name}Enum",
+            class_name=True,
+            singular_name=singular_name,
+            singular_name_suffix="Enum",
+            loaded=True,
+        )
+
+        data_model_root_type = self.data_model_root_type(
+            reference=reference,
+            fields=[
+                self.data_model_field_type(
+                    data_type=create_enum(enum_reference),
+                    default=obj.default,
+                    required=False,
+                    nullable=True,
+                    strip_default_none=self.strip_default_none,
+                    extras=self.get_field_extras(obj),
+                    use_annotated=self.use_annotated,
+                    has_default=obj.has_default,
+                    use_field_description=self.use_field_description,
+                    use_inline_field_description=self.use_inline_field_description,
+                    original_name=None,
+                )
+            ],
+            custom_base_class=obj.custom_base_path or self.base_class,
+            custom_template_dir=self.custom_template_dir,
+            extra_template_data=self.extra_template_data,
+            path=self.current_source_path,
+            default=obj.default if obj.has_default else UNDEFINED,
+            nullable=obj.type_has_null,
+            treat_dot_as_module=self.treat_dot_as_module,
+        )
+        self.results.append(data_model_root_type)
+        return self.data_type(reference=reference)
+
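+    # Example (comment only, hypothetical `Foo` schema): `enum: ['a', 'b',
+    # null]` with `type: string` (OpenAPI-style nullability) produces two
+    # models: a `FooEnum` holding the non-null members and a `Foo` root model
+    # whose field wraps the enum as nullable, instead of leaking `None` in as
+    # an enum member.
+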
+    def _get_ref_body(self, resolved_ref: str) -> dict[str, YamlValue]:
+        """Get the body of a reference from URL or remote file."""
+        if is_url(resolved_ref):
+            return self._get_ref_body_from_url(resolved_ref)
+        return self._get_ref_body_from_remote(resolved_ref)
+
+    def _get_ref_body_from_url(self, ref: str) -> dict[str, YamlValue]:
+        """Get reference body from a URL (HTTP, HTTPS, or file scheme)."""
+        if ref.startswith("file://"):
+            from urllib.parse import urlparse  # noqa: PLC0415
+            from urllib.request import url2pathname  # noqa: PLC0415
+
+            parsed = urlparse(ref)
+            # url2pathname handles percent-decoding and Windows drive letters
+            path = url2pathname(parsed.path)
+            # Handle UNC paths (file://server/share/path)
+            if parsed.netloc:
+                path = f"//{parsed.netloc}{path}"
+            file_path = Path(path)
+            return self.remote_object_cache.get_or_put(
+                ref, default_factory=lambda _: load_yaml_dict_from_path(file_path, self.encoding)
+            )
+        return self.remote_object_cache.get_or_put(
+            ref, default_factory=lambda key: load_yaml_dict(self._get_text_from_url(key))
+        )
+
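+    # Example (comment only): `file:///tmp/schemas/pet.yaml` resolves through
+    # `url2pathname` to `/tmp/schemas/pet.yaml`, while the UNC-style
+    # `file://server/share/pet.yaml` keeps its netloc and resolves to
+    # `//server/share/pet.yaml`; both results are memoized in
+    # `remote_object_cache` keyed by the original ref.
+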
+    def _get_ref_body_from_remote(self, resolved_ref: str) -> dict[str, YamlValue]:
+        """Get reference body from a remote file path."""
+        # Remote reference, e.g. $ref: 'document.json', uses the whole document located on the
+        # same server and in the same location. TODO: handle edge cases
+        full_path = self.base_path / resolved_ref
+
+        return self.remote_object_cache.get_or_put(
+            str(full_path),
+            default_factory=lambda _: load_yaml_dict_from_path(full_path, self.encoding),
+        )
+
+    def resolve_ref(self, object_ref: str) -> Reference:
+        """Resolve a reference by loading and parsing the referenced schema."""
+        reference = self.model_resolver.add_ref(object_ref)
+        if reference.loaded:
+            return reference
+
+        # https://swagger.io/docs/specification/using-ref/
+        ref = self.model_resolver.resolve_ref(object_ref)
+        if get_ref_type(object_ref) == JSONReference.LOCAL or get_ref_type(ref) == JSONReference.LOCAL:
+            self.reserved_refs[tuple(self.model_resolver.current_root)].add(ref)
+            return reference
+        if self.model_resolver.is_after_load(ref):
+            self.reserved_refs[tuple(ref.split("#")[0].split("/"))].add(ref)
+            return reference
+
+        if is_url(ref):
+            relative_path, object_path = ref.split("#")
+            relative_paths = [relative_path]
+            base_path = None
+        else:
+            if self.model_resolver.is_external_root_ref(ref):
+                relative_path, object_path = ref[:-1], ""
+            else:
+                relative_path, object_path = ref.split("#")
+            relative_paths = relative_path.split("/")
+            base_path = Path(*relative_paths).parent
+        with (
+            self.model_resolver.current_base_path_context(base_path),
+            self.model_resolver.base_url_context(relative_path),
+        ):
+            self._parse_file(
+                self._get_ref_body(relative_path),
+                self.model_resolver.add_ref(ref, resolved=True).name,
+                relative_paths,
+                object_path.split("/") if object_path else None,
+            )
+        reference.loaded = True
+        return reference
+
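+    # For illustration (hypothetical ref): in resolve_ref above, a remote reference
+    # such as "definitions/pet.json#/Pet" splits into relative_path
+    # "definitions/pet.json" and object_path "/Pet"; the referenced file is then
+    # parsed with _parse_file under the base path/URL of that file.
+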
+    def _traverse_schema_objects(  # noqa: PLR0912
+        self,
+        obj: JsonSchemaObject,
+        path: list[str],
+        callback: Callable[[JsonSchemaObject, list[str]], None],
+        *,
+        include_one_of: bool = True,
+    ) -> None:
+        """Traverse schema objects recursively and apply callback."""
+        callback(obj, path)
+        if obj.items:
+            if isinstance(obj.items, JsonSchemaObject):
+                self._traverse_schema_objects(obj.items, path, callback, include_one_of=include_one_of)
+            elif isinstance(obj.items, list):
+                for item in obj.items:
+                    self._traverse_schema_objects(item, path, callback, include_one_of=include_one_of)
+        if isinstance(obj.additionalProperties, JsonSchemaObject):
+            self._traverse_schema_objects(obj.additionalProperties, path, callback, include_one_of=include_one_of)
+        if obj.patternProperties:
+            for value in obj.patternProperties.values():
+                self._traverse_schema_objects(value, path, callback, include_one_of=include_one_of)
+        for item in obj.anyOf:
+            self._traverse_schema_objects(item, path, callback, include_one_of=include_one_of)
+        for item in obj.allOf:
+            self._traverse_schema_objects(item, path, callback, include_one_of=include_one_of)
+        if include_one_of:
+            for item in obj.oneOf:
+                self._traverse_schema_objects(item, path, callback, include_one_of=include_one_of)
+        if obj.properties:
+            for value in obj.properties.values():
+                if isinstance(value, JsonSchemaObject):
+                    self._traverse_schema_objects(value, path, callback, include_one_of=include_one_of)
+
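+    # For illustration: the callbacks below reuse this traversal, e.g.
+    #
+    #     self._traverse_schema_objects(obj, path, self._resolve_ref_callback)
+    #
+    # visits obj plus its items, additionalProperties, patternProperties,
+    # anyOf/allOf/oneOf branches, and properties, resolving every $ref it finds.
+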
+    def _resolve_ref_callback(self, obj: JsonSchemaObject, path: list[str]) -> None:  # noqa: ARG002
+        """Resolve $ref in schema object."""
+        if obj.ref:
+            self.resolve_ref(obj.ref)
+
+    def _add_id_callback(self, obj: JsonSchemaObject, path: list[str]) -> None:
+        """Add $id to model resolver."""
+        if obj.id:
+            self.model_resolver.add_id(obj.id, path)
+
+    def parse_ref(self, obj: JsonSchemaObject, path: list[str]) -> None:
+        """Recursively parse all $ref references in a schema object."""
+        self._traverse_schema_objects(obj, path, self._resolve_ref_callback)
+
+    def parse_id(self, obj: JsonSchemaObject, path: list[str]) -> None:
+        """Recursively parse all $id fields in a schema object."""
+        self._traverse_schema_objects(obj, path, self._add_id_callback, include_one_of=False)
+
+    @contextmanager
+    def root_id_context(self, root_raw: dict[str, Any]) -> Generator[None, None, None]:
+        """Context manager to temporarily set the root $id during parsing."""
+        previous_root_id = self.root_id
+        self.root_id = root_raw.get("$id") or None
+        yield
+        self.root_id = previous_root_id
+
+    def parse_raw_obj(
+        self,
+        name: str,
+        raw: dict[str, YamlValue] | YamlValue,
+        path: list[str],
+    ) -> None:
+        """Parse a raw dictionary into a JsonSchemaObject and process it."""
+        obj: JsonSchemaObject = (
+            self.SCHEMA_OBJECT_TYPE.model_validate(raw) if PYDANTIC_V2 else self.SCHEMA_OBJECT_TYPE.parse_obj(raw)
+        )
+        self.parse_obj(name, obj, path)
+
+    def parse_obj(  # noqa: PLR0912
+        self,
+        name: str,
+        obj: JsonSchemaObject,
+        path: list[str],
+    ) -> None:
+        """Parse a JsonSchemaObject by dispatching to appropriate parse methods."""
+        if obj.has_ref_with_schema_keywords:
+            obj = self._merge_ref_with_schema(obj)
+
+        if obj.is_array:
+            self.parse_array(name, obj, path)
+        elif obj.allOf:
+            self.parse_all_of(name, obj, path)
+        elif obj.oneOf or obj.anyOf:
+            combined_items = obj.oneOf or obj.anyOf
+            const_enum_data = self._extract_const_enum_from_combined(combined_items, obj.type)
+            if const_enum_data is not None:
+                enum_values, varnames, enum_type, nullable = const_enum_data
+                synthetic_obj = self._create_synthetic_enum_obj(obj, enum_values, varnames, enum_type, nullable)
+                if not self.should_parse_enum_as_literal(synthetic_obj):
+                    self.parse_enum(name, synthetic_obj, path)
+                else:
+                    self.parse_root_type(name, synthetic_obj, path)
+            else:
+                data_type = self.parse_root_type(name, obj, path)
+                if isinstance(data_type, EmptyDataType) and obj.properties:
+                    self.parse_object(name, obj, path)  # pragma: no cover
+        elif obj.properties:
+            if obj.has_multiple_types and isinstance(obj.type, list):
+                self._parse_multiple_types_with_properties(name, obj, obj.type, path)
+            else:
+                self.parse_object(name, obj, path)
+        elif obj.patternProperties:
+            self.parse_root_type(name, obj, path)
+        elif obj.type == "object":
+            self.parse_object(name, obj, path)
+        elif obj.enum and not self.should_parse_enum_as_literal(obj):
+            self.parse_enum(name, obj, path)
+        else:
+            self.parse_root_type(name, obj, path)
+        self.parse_ref(obj, path)
+
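+    # Dispatch order in parse_obj above: array, allOf, oneOf/anyOf, properties,
+    # patternProperties, bare "object", enum, and finally a root type; any $refs
+    # nested in the object are resolved afterwards by parse_ref.
+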
+    def _get_context_source_path_parts(self) -> Iterator[tuple[Source, list[str]]]:
+        """Get source and path parts for each input file with context managers."""
+        if isinstance(self.source, list) or (isinstance(self.source, Path) and self.source.is_dir()):
+            self.current_source_path = Path()
+            self.model_resolver.after_load_files = {
+                self.base_path.joinpath(s.path).resolve().as_posix() for s in self.iter_source
+            }
+
+        for source in self.iter_source:
+            if isinstance(self.source, ParseResult):
+                path_parts = self.get_url_path_parts(self.source)
+            else:
+                path_parts = list(source.path.parts)
+            if self.current_source_path is not None:
+                self.current_source_path = source.path
+            with (
+                self.model_resolver.current_base_path_context(source.path.parent),
+                self.model_resolver.current_root_context(path_parts),
+            ):
+                yield source, path_parts
+
+    def parse_raw(self) -> None:
+        """Parse all raw input sources into data models."""
+        for source, path_parts in self._get_context_source_path_parts():
+            raw_obj = load_yaml(source.text)
+            if not isinstance(raw_obj, dict):  # pragma: no cover
+                warn(f"{source.path} is empty or not a dict. Skipping this file", stacklevel=2)
+                continue
+            self.raw_obj = raw_obj
+            title = self.raw_obj.get("title")
+            title_str = str(title) if title is not None else "Model"
+            if self.custom_class_name_generator:
+                obj_name = title_str
+            else:
+                if self.class_name:
+                    obj_name = self.class_name
+                else:
+                    # backward compatible: fall back to the schema title
+                    obj_name = title_str
+                    if not self.model_resolver.validate_name(obj_name):
+                        obj_name = title_to_class_name(obj_name)
+                if not self.model_resolver.validate_name(obj_name):
+                    raise InvalidClassNameError(obj_name)
+            self._parse_file(self.raw_obj, obj_name, path_parts)
+
+        self._resolve_unparsed_json_pointer()
+
+    def _resolve_unparsed_json_pointer(self) -> None:
+        """Resolve any remaining unparsed JSON pointer references recursively."""
+        model_count: int = len(self.results)
+        for source in self.iter_source:
+            path_parts = list(source.path.parts)
+            if not (reserved_refs := self.reserved_refs.get(tuple(path_parts))):
+                continue
+            if self.current_source_path is not None:
+                self.current_source_path = source.path
+
+            with (
+                self.model_resolver.current_base_path_context(source.path.parent),
+                self.model_resolver.current_root_context(path_parts),
+            ):
+                for reserved_ref in sorted(reserved_refs):
+                    if self.model_resolver.add_ref(reserved_ref, resolved=True).loaded:
+                        continue
+                    # for root model
+                    self.raw_obj = load_yaml_dict(source.text)
+                    self.parse_json_pointer(self.raw_obj, reserved_ref, path_parts)
+
+        if model_count != len(self.results):
+            # New models have been generated, so try to resolve JSON pointers again.
+            self._resolve_unparsed_json_pointer()
+
+    def parse_json_pointer(self, raw: dict[str, YamlValue], ref: str, path_parts: list[str]) -> None:
+        """Parse a JSON pointer reference into a model."""
+        path = ref.split("#", 1)[-1]
+        if path[0] == "/":  # pragma: no cover
+            path = path[1:]
+        object_paths = path.split("/")
+        models = get_model_by_path(raw, object_paths)
+        model_name = object_paths[-1]
+
+        self.parse_raw_obj(model_name, models, [*path_parts, f"#/{object_paths[0]}", *object_paths[1:]])
+
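+    # For illustration (hypothetical ref): in parse_json_pointer above, a ref such
+    # as "schema.json#/definitions/Pet" yields object_paths ["definitions", "Pet"];
+    # the model body is looked up with get_model_by_path and the resulting model is
+    # named after the final segment, "Pet".
+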
+    def _parse_file(
+        self,
+        raw: dict[str, Any],
+        obj_name: str,
+        path_parts: list[str],
+        object_paths: list[str] | None = None,
+    ) -> None:
+        """Parse a file containing JSON Schema definitions and references."""
+        object_paths = [o for o in object_paths or [] if o]
+        path = [*path_parts, f"#/{object_paths[0]}", *object_paths[1:]] if object_paths else path_parts
+        with self.model_resolver.current_root_context(path_parts):
+            obj_name = self.model_resolver.add(path, obj_name, unique=False, class_name=True).name
+            with self.root_id_context(raw):
+                # Some JSON Schema documents include a 'self' attribute that holds version details
+                raw.pop("self", None)
+                # parse $id before parsing $ref
+                root_obj = self.SCHEMA_OBJECT_TYPE.parse_obj(raw)
+                self.parse_id(root_obj, path_parts)
+                definitions: dict[str, YamlValue] = {}
+                schema_path = ""
+                for schema_path_candidate, split_schema_path in self.schema_paths:
+                    try:
+                        if definitions := get_model_by_path(raw, split_schema_path):
+                            schema_path = schema_path_candidate
+                            break
+                    except KeyError:  # pragma: no cover
+                        continue
+
+                for key, model in definitions.items():
+                    obj = self.SCHEMA_OBJECT_TYPE.parse_obj(model)
+                    self.parse_id(obj, [*path_parts, schema_path, key])
+
+                if object_paths:
+                    models = get_model_by_path(raw, object_paths)
+                    model_name = object_paths[-1]
+                    self.parse_obj(model_name, self.SCHEMA_OBJECT_TYPE.parse_obj(models), path)
+                elif not self.skip_root_model:
+                    self.parse_obj(obj_name, root_obj, path_parts or ["#"])
+                for key, model in definitions.items():
+                    path = [*path_parts, schema_path, key]
+                    reference = self.model_resolver.get(path)
+                    if not reference or not reference.loaded:
+                        self.parse_raw_obj(key, model, path)
+
+                key = tuple(path_parts)
+                reserved_refs = set(self.reserved_refs.get(key) or [])
+                while reserved_refs:
+                    for reserved_path in sorted(reserved_refs):
+                        reference = self.model_resolver.references.get(reserved_path)
+                        if not reference or reference.loaded:
+                            continue
+                        object_paths = reserved_path.split("#/", 1)[-1].split("/")
+                        path = reserved_path.split("/")
+                        models = get_model_by_path(raw, object_paths)
+                        model_name = object_paths[-1]
+                        self.parse_obj(model_name, self.SCHEMA_OBJECT_TYPE.parse_obj(models), path)
+                    previous_reserved_refs = reserved_refs
+                    reserved_refs = set(self.reserved_refs.get(key) or [])
+                    if previous_reserved_refs == reserved_refs:
+                        break
diff -pruN 0.26.4-3/src/datamodel_code_generator/parser/openapi.py 0.45.0-1/src/datamodel_code_generator/parser/openapi.py
--- 0.26.4-3/src/datamodel_code_generator/parser/openapi.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/parser/openapi.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,830 @@
+"""OpenAPI and Swagger specification parser.
+
+Extends JsonSchemaParser to handle OpenAPI 2.0 (Swagger), 3.0, and 3.1
+specifications, including paths, operations, parameters, and request/response bodies.
+"""
+
+from __future__ import annotations
+
+import re
+from collections import defaultdict
+from contextlib import nullcontext
+from enum import Enum
+from pathlib import Path
+from re import Pattern
+from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, TypeVar, Union
+from warnings import warn
+
+from pydantic import Field
+
+from datamodel_code_generator import (
+    DEFAULT_SHARED_MODULE_NAME,
+    AllOfMergeMode,
+    DataclassArguments,
+    Error,
+    LiteralType,
+    OpenAPIScope,
+    PythonVersion,
+    PythonVersionMin,
+    ReadOnlyWriteOnlyModelType,
+    ReuseScope,
+    YamlValue,
+    load_yaml_dict,
+    snooper_to_methods,
+)
+from datamodel_code_generator.format import DEFAULT_FORMATTERS, DatetimeClassType, Formatter
+from datamodel_code_generator.model import DataModel, DataModelFieldBase
+from datamodel_code_generator.model import pydantic as pydantic_model
+from datamodel_code_generator.parser.base import get_special_path
+from datamodel_code_generator.parser.jsonschema import (
+    JsonSchemaObject,
+    JsonSchemaParser,
+    get_model_by_path,
+)
+from datamodel_code_generator.reference import FieldNameResolver, is_url, snake_to_upper_camel
+from datamodel_code_generator.types import (
+    DataType,
+    DataTypeManager,
+    EmptyDataType,
+    StrictTypes,
+)
+from datamodel_code_generator.util import BaseModel
+
+if TYPE_CHECKING:
+    from collections.abc import Iterable, Mapping, Sequence
+    from urllib.parse import ParseResult
+
+    from datamodel_code_generator.parser import DefaultPutDict
+
+
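+# Matches JSON-style media types such as "application/json" and
+# "application/problem+json", but not "text/json".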
+RE_APPLICATION_JSON_PATTERN: Pattern[str] = re.compile(r"^application/.*json$")
+
+OPERATION_NAMES: list[str] = [
+    "get",
+    "put",
+    "post",
+    "delete",
+    "patch",
+    "head",
+    "options",
+    "trace",
+]
+
+
+class ParameterLocation(Enum):
+    """Represent OpenAPI parameter locations."""
+
+    query = "query"
+    header = "header"
+    path = "path"
+    cookie = "cookie"
+
+
+BaseModelT = TypeVar("BaseModelT", bound=BaseModel)
+
+
+class ReferenceObject(BaseModel):
+    """Represent an OpenAPI reference object ($ref)."""
+
+    ref: str = Field(..., alias="$ref")
+
+
+class ExampleObject(BaseModel):
+    """Represent an OpenAPI example object."""
+
+    summary: Optional[str] = None  # noqa: UP045
+    description: Optional[str] = None  # noqa: UP045
+    value: YamlValue = None
+    externalValue: Optional[str] = None  # noqa: N815, UP045
+
+
+class MediaObject(BaseModel):
+    """Represent an OpenAPI media type object."""
+
+    schema_: Optional[Union[ReferenceObject, JsonSchemaObject]] = Field(None, alias="schema")  # noqa: UP007, UP045
+    example: YamlValue = None
+    examples: Optional[Union[str, ReferenceObject, ExampleObject]] = None  # noqa: UP007, UP045
+
+
+class ParameterObject(BaseModel):
+    """Represent an OpenAPI parameter object."""
+
+    name: Optional[str] = None  # noqa: UP045
+    in_: Optional[ParameterLocation] = Field(None, alias="in")  # noqa: UP045
+    description: Optional[str] = None  # noqa: UP045
+    required: bool = False
+    deprecated: bool = False
+    schema_: Optional[JsonSchemaObject] = Field(None, alias="schema")  # noqa: UP045
+    example: YamlValue = None
+    examples: Optional[Union[str, ReferenceObject, ExampleObject]] = None  # noqa: UP007, UP045
+    content: dict[str, MediaObject] = {}  # noqa: RUF012
+
+
+class HeaderObject(BaseModel):
+    """Represent an OpenAPI header object."""
+
+    description: Optional[str] = None  # noqa: UP045
+    required: bool = False
+    deprecated: bool = False
+    schema_: Optional[JsonSchemaObject] = Field(None, alias="schema")  # noqa: UP045
+    example: YamlValue = None
+    examples: Optional[Union[str, ReferenceObject, ExampleObject]] = None  # noqa: UP007, UP045
+    content: dict[str, MediaObject] = {}  # noqa: RUF012
+
+
+class RequestBodyObject(BaseModel):
+    """Represent an OpenAPI request body object."""
+
+    description: Optional[str] = None  # noqa: UP045
+    content: dict[str, MediaObject] = {}  # noqa: RUF012
+    required: bool = False
+
+
+class ResponseObject(BaseModel):
+    """Represent an OpenAPI response object."""
+
+    description: Optional[str] = None  # noqa: UP045
+    headers: dict[str, ParameterObject] = {}  # noqa: RUF012
+    content: dict[Union[str, int], MediaObject] = {}  # noqa: RUF012, UP007
+
+
+class Operation(BaseModel):
+    """Represent an OpenAPI operation object."""
+
+    tags: list[str] = []  # noqa: RUF012
+    summary: Optional[str] = None  # noqa: UP045
+    description: Optional[str] = None  # noqa: UP045
+    operationId: Optional[str] = None  # noqa: N815, UP045
+    parameters: list[Union[ReferenceObject, ParameterObject]] = []  # noqa: RUF012, UP007
+    requestBody: Optional[Union[ReferenceObject, RequestBodyObject]] = None  # noqa: N815, UP007, UP045
+    responses: dict[Union[str, int], Union[ReferenceObject, ResponseObject]] = {}  # noqa: RUF012, UP007
+    deprecated: bool = False
+
+
+class ComponentsObject(BaseModel):
+    """Represent an OpenAPI components object."""
+
+    schemas: dict[str, Union[ReferenceObject, JsonSchemaObject]] = {}  # noqa: RUF012, UP007
+    responses: dict[str, Union[ReferenceObject, ResponseObject]] = {}  # noqa: RUF012, UP007
+    examples: dict[str, Union[ReferenceObject, ExampleObject]] = {}  # noqa: RUF012, UP007
+    requestBodies: dict[str, Union[ReferenceObject, RequestBodyObject]] = {}  # noqa: N815, RUF012, UP007
+    headers: dict[str, Union[ReferenceObject, HeaderObject]] = {}  # noqa: RUF012, UP007
+
+
+@snooper_to_methods()
+class OpenAPIParser(JsonSchemaParser):
+    """Parser for OpenAPI 2.0/3.0/3.1 and Swagger specifications."""
+
+    SCHEMA_PATHS: ClassVar[list[str]] = ["#/components/schemas"]
+
+    def __init__(  # noqa: PLR0913
+        self,
+        source: str | Path | list[Path] | ParseResult,
+        *,
+        data_model_type: type[DataModel] = pydantic_model.BaseModel,
+        data_model_root_type: type[DataModel] = pydantic_model.CustomRootType,
+        data_type_manager_type: type[DataTypeManager] = pydantic_model.DataTypeManager,
+        data_model_field_type: type[DataModelFieldBase] = pydantic_model.DataModelField,
+        base_class: str | None = None,
+        additional_imports: list[str] | None = None,
+        custom_template_dir: Path | None = None,
+        extra_template_data: defaultdict[str, dict[str, Any]] | None = None,
+        target_python_version: PythonVersion = PythonVersionMin,
+        dump_resolve_reference_action: Callable[[Iterable[str]], str] | None = None,
+        validation: bool = False,
+        field_constraints: bool = False,
+        snake_case_field: bool = False,
+        strip_default_none: bool = False,
+        aliases: Mapping[str, str] | None = None,
+        allow_population_by_field_name: bool = False,
+        allow_extra_fields: bool = False,
+        extra_fields: str | None = None,
+        apply_default_values_for_required_fields: bool = False,
+        force_optional_for_required_fields: bool = False,
+        class_name: str | None = None,
+        use_standard_collections: bool = False,
+        base_path: Path | None = None,
+        use_schema_description: bool = False,
+        use_field_description: bool = False,
+        use_attribute_docstrings: bool = False,
+        use_inline_field_description: bool = False,
+        use_default_kwarg: bool = False,
+        reuse_model: bool = False,
+        reuse_scope: ReuseScope | None = None,
+        shared_module_name: str = DEFAULT_SHARED_MODULE_NAME,
+        encoding: str = "utf-8",
+        enum_field_as_literal: LiteralType | None = None,
+        use_one_literal_as_default: bool = False,
+        use_enum_values_in_discriminator: bool = False,
+        set_default_enum_member: bool = False,
+        use_subclass_enum: bool = False,
+        use_specialized_enum: bool = True,
+        strict_nullable: bool = False,
+        use_generic_container_types: bool = False,
+        enable_faux_immutability: bool = False,
+        remote_text_cache: DefaultPutDict[str, str] | None = None,
+        disable_appending_item_suffix: bool = False,
+        strict_types: Sequence[StrictTypes] | None = None,
+        empty_enum_field_name: str | None = None,
+        custom_class_name_generator: Callable[[str], str] | None = None,
+        field_extra_keys: set[str] | None = None,
+        field_include_all_keys: bool = False,
+        field_extra_keys_without_x_prefix: set[str] | None = None,
+        openapi_scopes: list[OpenAPIScope] | None = None,
+        include_path_parameters: bool = False,
+        wrap_string_literal: bool | None = False,
+        use_title_as_name: bool = False,
+        use_operation_id_as_name: bool = False,
+        use_unique_items_as_set: bool = False,
+        allof_merge_mode: AllOfMergeMode = AllOfMergeMode.Constraints,
+        http_headers: Sequence[tuple[str, str]] | None = None,
+        http_ignore_tls: bool = False,
+        use_annotated: bool = False,
+        use_serialize_as_any: bool = False,
+        use_non_positive_negative_number_constrained_types: bool = False,
+        use_decimal_for_multiple_of: bool = False,
+        original_field_name_delimiter: str | None = None,
+        use_double_quotes: bool = False,
+        use_union_operator: bool = False,
+        allow_responses_without_content: bool = False,
+        collapse_root_models: bool = False,
+        skip_root_model: bool = False,
+        use_type_alias: bool = False,
+        special_field_name_prefix: str | None = None,
+        remove_special_field_name_prefix: bool = False,
+        capitalise_enum_members: bool = False,
+        keep_model_order: bool = False,
+        known_third_party: list[str] | None = None,
+        custom_formatters: list[str] | None = None,
+        custom_formatters_kwargs: dict[str, Any] | None = None,
+        use_pendulum: bool = False,
+        http_query_parameters: Sequence[tuple[str, str]] | None = None,
+        treat_dot_as_module: bool = False,
+        use_exact_imports: bool = False,
+        default_field_extras: dict[str, Any] | None = None,
+        target_datetime_class: DatetimeClassType | None = None,
+        keyword_only: bool = False,
+        frozen_dataclasses: bool = False,
+        no_alias: bool = False,
+        formatters: list[Formatter] = DEFAULT_FORMATTERS,
+        parent_scoped_naming: bool = False,
+        dataclass_arguments: DataclassArguments | None = None,
+        type_mappings: list[str] | None = None,
+        read_only_write_only_model_type: ReadOnlyWriteOnlyModelType | None = None,
+        use_frozen_field: bool = False,
+    ) -> None:
+        """Initialize the OpenAPI parser with extensive configuration options."""
+        target_datetime_class = target_datetime_class or DatetimeClassType.Awaredatetime
+        super().__init__(
+            source=source,
+            data_model_type=data_model_type,
+            data_model_root_type=data_model_root_type,
+            data_type_manager_type=data_type_manager_type,
+            data_model_field_type=data_model_field_type,
+            base_class=base_class,
+            additional_imports=additional_imports,
+            custom_template_dir=custom_template_dir,
+            extra_template_data=extra_template_data,
+            target_python_version=target_python_version,
+            dump_resolve_reference_action=dump_resolve_reference_action,
+            validation=validation,
+            field_constraints=field_constraints,
+            snake_case_field=snake_case_field,
+            strip_default_none=strip_default_none,
+            aliases=aliases,
+            allow_population_by_field_name=allow_population_by_field_name,
+            allow_extra_fields=allow_extra_fields,
+            extra_fields=extra_fields,
+            apply_default_values_for_required_fields=apply_default_values_for_required_fields,
+            force_optional_for_required_fields=force_optional_for_required_fields,
+            class_name=class_name,
+            use_standard_collections=use_standard_collections,
+            base_path=base_path,
+            use_schema_description=use_schema_description,
+            use_field_description=use_field_description,
+            use_attribute_docstrings=use_attribute_docstrings,
+            use_inline_field_description=use_inline_field_description,
+            use_default_kwarg=use_default_kwarg,
+            reuse_model=reuse_model,
+            reuse_scope=reuse_scope,
+            shared_module_name=shared_module_name,
+            encoding=encoding,
+            enum_field_as_literal=enum_field_as_literal,
+            use_one_literal_as_default=use_one_literal_as_default,
+            use_enum_values_in_discriminator=use_enum_values_in_discriminator,
+            set_default_enum_member=set_default_enum_member,
+            use_subclass_enum=use_subclass_enum,
+            use_specialized_enum=use_specialized_enum,
+            strict_nullable=strict_nullable,
+            use_generic_container_types=use_generic_container_types,
+            enable_faux_immutability=enable_faux_immutability,
+            remote_text_cache=remote_text_cache,
+            disable_appending_item_suffix=disable_appending_item_suffix,
+            strict_types=strict_types,
+            empty_enum_field_name=empty_enum_field_name,
+            custom_class_name_generator=custom_class_name_generator,
+            field_extra_keys=field_extra_keys,
+            field_include_all_keys=field_include_all_keys,
+            field_extra_keys_without_x_prefix=field_extra_keys_without_x_prefix,
+            wrap_string_literal=wrap_string_literal,
+            use_title_as_name=use_title_as_name,
+            use_operation_id_as_name=use_operation_id_as_name,
+            use_unique_items_as_set=use_unique_items_as_set,
+            allof_merge_mode=allof_merge_mode,
+            http_headers=http_headers,
+            http_ignore_tls=http_ignore_tls,
+            use_annotated=use_annotated,
+            use_serialize_as_any=use_serialize_as_any,
+            use_non_positive_negative_number_constrained_types=use_non_positive_negative_number_constrained_types,
+            use_decimal_for_multiple_of=use_decimal_for_multiple_of,
+            original_field_name_delimiter=original_field_name_delimiter,
+            use_double_quotes=use_double_quotes,
+            use_union_operator=use_union_operator,
+            allow_responses_without_content=allow_responses_without_content,
+            collapse_root_models=collapse_root_models,
+            skip_root_model=skip_root_model,
+            use_type_alias=use_type_alias,
+            special_field_name_prefix=special_field_name_prefix,
+            remove_special_field_name_prefix=remove_special_field_name_prefix,
+            capitalise_enum_members=capitalise_enum_members,
+            keep_model_order=keep_model_order,
+            known_third_party=known_third_party,
+            custom_formatters=custom_formatters,
+            custom_formatters_kwargs=custom_formatters_kwargs,
+            use_pendulum=use_pendulum,
+            http_query_parameters=http_query_parameters,
+            treat_dot_as_module=treat_dot_as_module,
+            use_exact_imports=use_exact_imports,
+            default_field_extras=default_field_extras,
+            target_datetime_class=target_datetime_class,
+            keyword_only=keyword_only,
+            frozen_dataclasses=frozen_dataclasses,
+            no_alias=no_alias,
+            formatters=formatters,
+            parent_scoped_naming=parent_scoped_naming,
+            dataclass_arguments=dataclass_arguments,
+            type_mappings=type_mappings,
+            read_only_write_only_model_type=read_only_write_only_model_type,
+            use_frozen_field=use_frozen_field,
+        )
+        self.open_api_scopes: list[OpenAPIScope] = openapi_scopes or [OpenAPIScope.Schemas]
+        self.include_path_parameters: bool = include_path_parameters
+        self._discriminator_schemas: dict[str, dict[str, Any]] = {}
+        self._discriminator_subtypes: dict[str, list[str]] = defaultdict(list)
+
+    def get_ref_model(self, ref: str) -> dict[str, Any]:
+        """Resolve a reference to its model definition."""
+        ref_file, ref_path = self.model_resolver.resolve_ref(ref).split("#", 1)
+        ref_body = self._get_ref_body(ref_file) if ref_file else self.raw_obj
+        return get_model_by_path(ref_body, ref_path.split("/")[1:])
+
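+    # For illustration (hypothetical ref): in get_ref_model above, a same-document
+    # reference such as "#/components/schemas/Pet" has an empty ref_file, so its
+    # body is looked up in self.raw_obj under ["components", "schemas", "Pet"];
+    # a non-empty ref_file is loaded via _get_ref_body instead.
+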
+    def get_data_type(self, obj: JsonSchemaObject) -> DataType:
+        """Get data type from JSON schema object, handling OpenAPI nullable semantics."""
+        # OpenAPI 3.0 doesn't allow `null` in the `type` field or in a list of types
+        # https://swagger.io/docs/specification/data-models/data-types/#null
+        # OpenAPI 3.1 does allow `null` in the `type` field, which is equivalent to
+        # a `nullable` flag on the property itself
+        if obj.nullable and self.strict_nullable and isinstance(obj.type, str):
+            obj.type = [obj.type, "null"]
+
+        return super().get_data_type(obj)
+
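+    # For illustration: under strict_nullable, an OpenAPI 3.0 property declared as
+    #
+    #     type: string
+    #     nullable: true
+    #
+    # is normalized by get_data_type above to the OpenAPI 3.1 / JSON Schema form
+    #
+    #     type: ["string", "null"]
+    #
+    # before the generic JSON Schema type resolution runs.
+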
+    def _get_discriminator_union_type(self, ref: str) -> DataType | None:
+        """Create a union type for discriminator subtypes if available."""
+        subtypes = self._discriminator_subtypes.get(ref, [])
+        if not subtypes:
+            return None
+        refs = map(self.model_resolver.add_ref, subtypes)
+        return self.data_type(data_types=[self.data_type(reference=r) for r in refs])
+
+    def get_ref_data_type(self, ref: str) -> DataType:
+        """Get data type for a reference, handling discriminator polymorphism."""
+        if ref in self._discriminator_schemas and (union_type := self._get_discriminator_union_type(ref)):
+            return union_type
+        return super().get_ref_data_type(ref)
+
+    def parse_object_fields(
+        self,
+        obj: JsonSchemaObject,
+        path: list[str],
+        module_name: Optional[str] = None,  # noqa: UP045
+        class_name: Optional[str] = None,  # noqa: UP045
+    ) -> list[DataModelFieldBase]:
+        """Parse object fields, adding discriminator info for allOf polymorphism."""
+        fields = super().parse_object_fields(obj, path, module_name, class_name=class_name)
+        properties = obj.properties or {}
+
+        result_fields: list[DataModelFieldBase] = []
+        for field_obj in fields:
+            field = properties.get(field_obj.original_name)
+
+            if (
+                isinstance(field, JsonSchemaObject)
+                and field.ref
+                and (discriminator := self._discriminator_schemas.get(field.ref))
+            ):
+                new_field_type = self._get_discriminator_union_type(field.ref) or field_obj.data_type
+                field_obj = self.data_model_field_type(**{  # noqa: PLW2901
+                    **field_obj.__dict__,
+                    "data_type": new_field_type,
+                    "extras": {**field_obj.extras, "discriminator": discriminator},
+                })
+            result_fields.append(field_obj)
+
+        return result_fields
+
+    def resolve_object(self, obj: ReferenceObject | BaseModelT, object_type: type[BaseModelT]) -> BaseModelT:
+        """Resolve a reference object to its actual type or return the object as-is."""
+        if isinstance(obj, ReferenceObject):
+            ref_obj = self.get_ref_model(obj.ref)
+            return object_type.parse_obj(ref_obj)
+        return obj
+
+    def _parse_schema_or_ref(
+        self,
+        name: str,
+        schema: JsonSchemaObject | ReferenceObject | None,
+        path: list[str],
+    ) -> DataType | None:
+        """Parse a schema object or resolve a reference to get DataType."""
+        if schema is None:
+            return None
+        if isinstance(schema, JsonSchemaObject):
+            return self.parse_schema(name, schema, path)
+        self.resolve_ref(schema.ref)
+        return self.get_ref_data_type(schema.ref)
+
+    def _process_path_items(  # noqa: PLR0913
+        self,
+        items: dict[str, dict[str, Any]],
+        base_path: list[str],
+        scope_name: str,
+        global_parameters: list[dict[str, Any]],
+        security: list[dict[str, list[str]]] | None,
+        *,
+        strip_leading_slash: bool = True,
+    ) -> None:
+        """Process path or webhook items with operations."""
+        scope_path = [*base_path, f"#/{scope_name}"]
+        for item_name, methods_ in items.items():
+            item_ref = methods_.get("$ref")
+            if item_ref:
+                methods = self.get_ref_model(item_ref)
+                # Extract base path from reference for external file resolution
+                resolved_ref = self.model_resolver.resolve_ref(item_ref)
+                ref_file = resolved_ref.split("#")[0] if "#" in resolved_ref else resolved_ref
+                ref_base_path = Path(ref_file).parent if ref_file and not is_url(ref_file) else None
+            else:
+                methods = methods_
+                ref_base_path = None
+
+            item_parameters = global_parameters.copy()
+            if "parameters" in methods:
+                item_parameters.extend(methods["parameters"])
+
+            relative_name = item_name[1:] if strip_leading_slash else item_name.removeprefix("/")
+            path = [*scope_path, relative_name] if relative_name else get_special_path("root", scope_path)
+
+            base_path_context = (
+                self.model_resolver.current_base_path_context(ref_base_path) if ref_base_path else nullcontext()
+            )
+            with base_path_context:
+                for operation_name, raw_operation in methods.items():
+                    if operation_name not in OPERATION_NAMES:
+                        continue
+                    if item_parameters:
+                        if "parameters" in raw_operation:
+                            raw_operation["parameters"].extend(item_parameters)
+                        else:
+                            raw_operation["parameters"] = item_parameters.copy()
+                    if security is not None and "security" not in raw_operation:
+                        raw_operation["security"] = security
+                    self.parse_operation(raw_operation, [*path, operation_name])
+
+    def parse_schema(
+        self,
+        name: str,
+        obj: JsonSchemaObject,
+        path: list[str],
+    ) -> DataType:
+        """Parse a JSON schema object into a data type."""
+        if obj.is_array:
+            data_type = self.parse_array(name, obj, [*path, name])
+        elif obj.allOf:  # pragma: no cover
+            data_type = self.parse_all_of(name, obj, path)
+        elif obj.oneOf or obj.anyOf:  # pragma: no cover
+            data_type = self.parse_root_type(name, obj, path)
+            if isinstance(data_type, EmptyDataType) and obj.properties:
+                self.parse_object(name, obj, path)
+        elif obj.is_object:
+            data_type = self.parse_object(name, obj, path)
+        elif obj.enum:  # pragma: no cover
+            data_type = self.parse_enum(name, obj, path)
+        elif obj.ref:  # pragma: no cover
+            data_type = self.get_ref_data_type(obj.ref)
+        else:
+            data_type = self.get_data_type(obj)
+        self.parse_ref(obj, path)
+        return data_type
+
+    def parse_request_body(
+        self,
+        name: str,
+        request_body: RequestBodyObject,
+        path: list[str],
+    ) -> dict[str, DataType]:
+        """Parse request body content into data types by media type."""
+        data_types: dict[str, DataType] = {}
+        for media_type, media_obj in request_body.content.items():
+            data_type = self._parse_schema_or_ref(name, media_obj.schema_, [*path, media_type])
+            if data_type:
+                data_types[media_type] = data_type
+        return data_types
+
+    def parse_responses(
+        self,
+        name: str,
+        responses: dict[str | int, ReferenceObject | ResponseObject],
+        path: list[str],
+    ) -> dict[str | int, dict[str, DataType]]:
+        """Parse response objects into data types by status code and content type."""
+        data_types: defaultdict[str | int, dict[str, DataType]] = defaultdict(dict)
+        for status_code, detail in responses.items():
+            if isinstance(detail, ReferenceObject):
+                if not detail.ref:  # pragma: no cover
+                    continue
+                ref_model = self.get_ref_model(detail.ref)
+                content = {k: MediaObject.parse_obj(v) for k, v in ref_model.get("content", {}).items()}
+            else:
+                content = detail.content
+
+            if self.allow_responses_without_content and not content:
+                data_types[status_code]["application/json"] = DataType(type="None")
+
+            for content_type, obj in content.items():
+                response_path: list[str] = [*path, str(status_code), str(content_type)]
+                data_type = self._parse_schema_or_ref(name, obj.schema_, response_path)
+                if data_type:
+                    data_types[status_code][content_type] = data_type  # pyright: ignore[reportArgumentType]
+
+        return data_types
+
+    @classmethod
+    def parse_tags(
+        cls,
+        name: str,  # noqa: ARG003
+        tags: list[str],
+        path: list[str],  # noqa: ARG003
+    ) -> list[str]:
+        """Parse operation tags."""
+        return tags
+
+    _field_name_resolver: FieldNameResolver = FieldNameResolver()
+
+    @classmethod
+    def _get_model_name(cls, path_name: str, method: str, suffix: str) -> str:
+        normalized = cls._field_name_resolver.get_valid_name(path_name, ignore_snake_case_field=True)
+        camel_path_name = snake_to_upper_camel(normalized)
+        return f"{camel_path_name}{method.capitalize()}{suffix}"
+
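+    # For illustration (hypothetical inputs): _get_model_name("/pets", "get",
+    # "Response") normalizes the path, camel-cases it, and appends the capitalized
+    # method and suffix, yielding something like "PetsGetResponse".
+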
+    def parse_all_parameters(
+        self,
+        name: str,
+        parameters: list[ReferenceObject | ParameterObject],
+        path: list[str],
+    ) -> DataType | None:
+        """Parse all operation parameters into a data model."""
+        fields: list[DataModelFieldBase] = []
+        exclude_field_names: set[str] = set()
+        reference = self.model_resolver.add(path, name, class_name=True, unique=True)
+        for parameter_ in parameters:
+            parameter = self.resolve_object(parameter_, ParameterObject)
+            parameter_name = parameter.name
+            if (
+                not parameter_name
+                or parameter.in_ not in {ParameterLocation.query, ParameterLocation.path}
+                or (parameter.in_ == ParameterLocation.path and not self.include_path_parameters)
+            ):
+                continue
+
+            if any(field.original_name == parameter_name for field in fields):
+                msg = f"Parameter name '{parameter_name}' is used more than once."
+                raise Exception(msg)  # noqa: TRY002
+
+            field_name, alias = self.model_resolver.get_valid_field_name_and_alias(
+                field_name=parameter_name,
+                excludes=exclude_field_names,
+                model_type=self.field_name_model_type,
+                class_name=name,
+            )
+            if parameter.schema_:
+                fields.append(
+                    self.get_object_field(
+                        field_name=field_name,
+                        field=parameter.schema_,
+                        field_type=self.parse_item(field_name, parameter.schema_, [*path, name, parameter_name]),
+                        original_field_name=parameter_name,
+                        required=parameter.required,
+                        alias=alias,
+                    )
+                )
+            else:
+                data_types: list[DataType] = []
+                object_schema: JsonSchemaObject | None = None
+                for (
+                    media_type,
+                    media_obj,
+                ) in parameter.content.items():
+                    if not media_obj.schema_:
+                        continue
+                    object_schema = self.resolve_object(media_obj.schema_, JsonSchemaObject)
+                    data_types.append(
+                        self.parse_item(
+                            field_name,
+                            object_schema,
+                            [*path, name, parameter_name, media_type],
+                        )
+                    )
+
+                if not data_types:
+                    continue
+                if len(data_types) == 1:
+                    data_type = data_types[0]
+                else:
+                    data_type = self.data_type(data_types=data_types)
+                    # multiple data types are parsed as a non-constraints field
+                    object_schema = None
+                fields.append(
+                    self.data_model_field_type(
+                        name=field_name,
+                        default=object_schema.default if object_schema else None,
+                        data_type=data_type,
+                        required=parameter.required,
+                        alias=alias,
+                        constraints=object_schema.dict()
+                        if object_schema and self.is_constraints_field(object_schema)
+                        else None,
+                        nullable=object_schema.nullable
+                        if object_schema and self.strict_nullable and (object_schema.has_default or parameter.required)
+                        else None,
+                        strip_default_none=self.strip_default_none,
+                        extras=self.get_field_extras(object_schema) if object_schema else {},
+                        use_annotated=self.use_annotated,
+                        use_serialize_as_any=self.use_serialize_as_any,
+                        use_field_description=self.use_field_description,
+                        use_inline_field_description=self.use_inline_field_description,
+                        use_default_kwarg=self.use_default_kwarg,
+                        original_name=parameter_name,
+                        has_default=object_schema.has_default if object_schema else False,
+                        type_has_null=object_schema.type_has_null if object_schema else None,
+                    )
+                )
+
+        if OpenAPIScope.Parameters in self.open_api_scopes and fields:
+            # Use _create_data_model from the parent class JsonSchemaParser;
+            # it automatically adds frozen=True for dataclass types.
+            self.results.append(
+                self._create_data_model(
+                    fields=fields,
+                    reference=reference,
+                    custom_base_class=self.base_class,
+                    custom_template_dir=self.custom_template_dir,
+                    keyword_only=self.keyword_only,
+                    treat_dot_as_module=self.treat_dot_as_module,
+                    dataclass_arguments=self.dataclass_arguments,
+                )
+            )
+            return self.data_type(reference=reference)
+
+        return None
+
+    def parse_operation(
+        self,
+        raw_operation: dict[str, Any],
+        path: list[str],
+    ) -> None:
+        """Parse an OpenAPI operation including parameters, request body, and responses."""
+        operation = Operation.parse_obj(raw_operation)
+        path_name, method = path[-2:]
+        if self.use_operation_id_as_name:
+            if not operation.operationId:
+                msg = (
+                    f"All operations must have an operationId when --use_operation_id_as_name is set."
+                    f"The following path was missing an operationId: {path_name}"
+                )
+                raise Error(msg)
+            path_name = operation.operationId
+            method = ""
+        self.parse_all_parameters(
+            self._get_model_name(
+                path_name, method, suffix="Parameters" if self.include_path_parameters else "ParametersQuery"
+            ),
+            operation.parameters,
+            [*path, "parameters"],
+        )
+        if operation.requestBody:
+            if isinstance(operation.requestBody, ReferenceObject):
+                ref_model = self.get_ref_model(operation.requestBody.ref)
+                request_body = RequestBodyObject.parse_obj(ref_model)
+            else:
+                request_body = operation.requestBody
+            self.parse_request_body(
+                name=self._get_model_name(path_name, method, suffix="Request"),
+                request_body=request_body,
+                path=[*path, "requestBody"],
+            )
+        self.parse_responses(
+            name=self._get_model_name(path_name, method, suffix="Response"),
+            responses=operation.responses,
+            path=[*path, "responses"],
+        )
+        if OpenAPIScope.Tags in self.open_api_scopes:
+            self.parse_tags(
+                name=self._get_model_name(path_name, method, suffix="Tags"),
+                tags=operation.tags,
+                path=[*path, "tags"],
+            )
+
+    def parse_raw(self) -> None:
+        """Parse OpenAPI specification including schemas, paths, and operations."""
+        for source, path_parts in self._get_context_source_path_parts():
+            if self.validation:
+                warn(
+                    "Deprecated: `--validation` option is deprecated. the option will be removed in a future "
+                    "release. please use another tool to validate OpenAPI.\n",
+                    stacklevel=2,
+                )
+
+                try:
+                    from prance import BaseParser  # noqa: PLC0415
+
+                    BaseParser(
+                        spec_string=source.text,
+                        backend="openapi-spec-validator",
+                        encoding=self.encoding,
+                    )
+                except ImportError:  # pragma: no cover
+                    warn(
+                        "Warning: Validation was skipped for OpenAPI. `prance` or `openapi-spec-validator` are not "
+                        "installed.\n"
+                        "To use --validation option after datamodel-code-generator 0.24.0, Please run `$pip install "
+                        "'datamodel-code-generator[validation]'`.\n",
+                        stacklevel=2,
+                    )
+
+            specification: dict[str, Any] = load_yaml_dict(source.text)
+            self.raw_obj = specification
+            self._collect_discriminator_schemas()
+            schemas: dict[str, Any] = specification.get("components", {}).get("schemas", {})
+            paths: dict[str, Any] = specification.get("paths", {})
+            security: list[dict[str, list[str]]] | None = specification.get("security")
+            # Warn if schemas is empty but paths exist and only Schemas scope is used
+            if not schemas and self.open_api_scopes == [OpenAPIScope.Schemas] and paths:
+                warn(
+                    "No schemas found in components/schemas. If your schemas are defined in "
+                    "external files referenced from paths, consider using --openapi-scopes paths",
+                    stacklevel=2,
+                )
+            if OpenAPIScope.Schemas in self.open_api_scopes:
+                for obj_name, raw_obj in schemas.items():
+                    self.parse_raw_obj(
+                        obj_name,
+                        raw_obj,
+                        [*path_parts, "#/components", "schemas", obj_name],
+                    )
+            if OpenAPIScope.Paths in self.open_api_scopes:
+                # Resolve $ref in global parameter list
+                global_parameters = [
+                    self._get_ref_body(p["$ref"]) if isinstance(p, dict) and "$ref" in p else p
+                    for p in paths.get("parameters", [])
+                    if isinstance(p, dict)
+                ]
+                self._process_path_items(paths, path_parts, "paths", global_parameters, security)
+
+            if OpenAPIScope.Webhooks in self.open_api_scopes:
+                webhooks: dict[str, dict[str, Any]] = specification.get("webhooks", {})
+                self._process_path_items(webhooks, path_parts, "webhooks", [], security, strip_leading_slash=False)
+
+        self._resolve_unparsed_json_pointer()
+
+    def _collect_discriminator_schemas(self) -> None:
+        """Collect schemas with discriminators but no oneOf/anyOf, and find their subtypes."""
+        schemas: dict[str, Any] = self.raw_obj.get("components", {}).get("schemas", {})
+
+        for schema_name, schema in schemas.items():
+            discriminator = schema.get("discriminator")
+            if not discriminator:
+                continue
+
+            if schema.get("oneOf") or schema.get("anyOf"):
+                continue
+
+            ref = f"#/components/schemas/{schema_name}"
+            self._discriminator_schemas[ref] = discriminator
+
+        for schema_name, schema in schemas.items():
+            for all_of_item in schema.get("allOf", []):
+                ref_in_allof = all_of_item.get("$ref")
+                if ref_in_allof and ref_in_allof in self._discriminator_schemas:
+                    subtype_ref = f"#/components/schemas/{schema_name}"
+                    self._discriminator_subtypes[ref_in_allof].append(subtype_ref)
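+
+    # For illustration (a hypothetical spec, not part of the original module):
+    # given components such as
+    #
+    #     Pet:
+    #       discriminator:
+    #         propertyName: petType
+    #     Cat:
+    #       allOf:
+    #         - $ref: '#/components/schemas/Pet'
+    #
+    # "#/components/schemas/Pet" is recorded in _discriminator_schemas and
+    # "#/components/schemas/Cat" is appended to its subtype list, so later
+    # references to Pet can expand to a union of its concrete subtypes.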
diff -pruN 0.26.4-3/src/datamodel_code_generator/pydantic_patch.py 0.45.0-1/src/datamodel_code_generator/pydantic_patch.py
--- 0.26.4-3/src/datamodel_code_generator/pydantic_patch.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/pydantic_patch.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,28 @@
+"""Pydantic compatibility patches for Python 3.12+.
+
+Patches pydantic.typing.evaluate_forwardref for forward reference evaluation
+compatibility with newer Python versions.
+"""
+
+from __future__ import annotations
+
+import sys
+from typing import Any
+
+import pydantic.typing
+
+
+def patched_evaluate_forwardref(
+    forward_ref: Any, globalns: dict[str, Any], localns: dict[str, Any] | None = None
+) -> Any:  # pragma: no cover
+    """Evaluate a forward reference with Python 3.12+ compatibility."""
+    try:
+        return forward_ref._evaluate(globalns, localns or None, set())  # pragma: no cover  # noqa: SLF001
+    except TypeError:
+        # Fallback for Python 3.12 compatibility
+        return forward_ref._evaluate(globalns, localns or None, set(), recursive_guard=set())  # noqa: SLF001
+
+
+# Patch only on Python 3.12 and newer
+if sys.version_info >= (3, 12):
+    pydantic.typing.evaluate_forwardref = patched_evaluate_forwardref  # pyright: ignore[reportAttributeAccessIssue]
diff -pruN 0.26.4-3/src/datamodel_code_generator/reference.py 0.45.0-1/src/datamodel_code_generator/reference.py
--- 0.26.4-3/src/datamodel_code_generator/reference.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/reference.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,979 @@
+"""Reference resolution and model tracking system.
+
+Provides Reference for tracking model references across schemas, ModelResolver
+for managing class names and field names, and FieldNameResolver for converting
+schema field names to valid Python identifiers.
+"""
+
+from __future__ import annotations
+
+import re
+from collections import defaultdict
+from contextlib import contextmanager
+from enum import Enum, auto
+from functools import cached_property, lru_cache
+from itertools import zip_longest
+from keyword import iskeyword
+from pathlib import Path, PurePath
+from re import Pattern
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    ClassVar,
+    NamedTuple,
+    Optional,
+    Protocol,
+    TypeVar,
+    cast,
+    runtime_checkable,
+)
+from urllib.parse import ParseResult, urlparse
+
+import pydantic
+from packaging import version
+from pydantic import BaseModel, Field
+from typing_extensions import TypeIs
+
+from datamodel_code_generator import Error
+from datamodel_code_generator.util import PYDANTIC_V2, ConfigDict, camel_to_snake, model_validator
+
+if TYPE_CHECKING:
+    from collections.abc import Generator, Iterator, Mapping, Sequence
+    from collections.abc import Set as AbstractSet
+
+    import inflect
+    from pydantic.typing import DictStrAny
+
+    from datamodel_code_generator.model.base import DataModel
+    from datamodel_code_generator.types import DataType
+
+
+def _is_data_type(value: object) -> TypeIs[DataType]:
+    """Check if value is a DataType instance."""
+    from datamodel_code_generator.types import DataType as DataType_  # noqa: PLC0415
+
+    return isinstance(value, DataType_)
+
+
+def _is_data_model(value: object) -> TypeIs[DataModel]:
+    """Check if value is a DataModel instance."""
+    from datamodel_code_generator.model.base import DataModel as DataModel_  # noqa: PLC0415
+
+    return isinstance(value, DataModel_)
+
+
+@runtime_checkable
+class ReferenceChild(Protocol):
+    """Protocol for objects that can be stored in Reference.children.
+
+    This is a minimal protocol - actual usage checks isinstance for DataType
+    or DataModel to access specific methods like replace_reference or class_name.
+    Using a property makes the type covariant, allowing both DataModel (Reference)
+    and DataType (Reference | None) to satisfy this protocol.
+    """
+
+    @property
+    def reference(self) -> Reference | None:
+        """Return the reference associated with this object."""
+        ...
+
+
+class _BaseModel(BaseModel):
+    """Base model with field exclusion and pass-through support."""
+
+    _exclude_fields: ClassVar[set[str]] = set()
+    _pass_fields: ClassVar[set[str]] = set()
+
+    if not TYPE_CHECKING:
+
+        def __init__(self, **values: Any) -> None:
+            super().__init__(**values)
+            for pass_field_name in self._pass_fields:
+                if pass_field_name in values:
+                    setattr(self, pass_field_name, values[pass_field_name])
+
+    if not TYPE_CHECKING:
+        if PYDANTIC_V2:
+
+            def dict(  # noqa: PLR0913
+                self,
+                *,
+                include: AbstractSet[int | str] | Mapping[int | str, Any] | None = None,
+                exclude: AbstractSet[int | str] | Mapping[int | str, Any] | None = None,
+                by_alias: bool = False,
+                exclude_unset: bool = False,
+                exclude_defaults: bool = False,
+                exclude_none: bool = False,
+            ) -> DictStrAny:
+                return self.model_dump(
+                    include=include,
+                    exclude=set(exclude or ()) | self._exclude_fields,
+                    by_alias=by_alias,
+                    exclude_unset=exclude_unset,
+                    exclude_defaults=exclude_defaults,
+                    exclude_none=exclude_none,
+                )
+
+        else:
+
+            def dict(  # noqa: PLR0913
+                self,
+                *,
+                include: AbstractSet[int | str] | Mapping[int | str, Any] | None = None,
+                exclude: AbstractSet[int | str] | Mapping[int | str, Any] | None = None,
+                by_alias: bool = False,
+                skip_defaults: bool | None = None,
+                exclude_unset: bool = False,
+                exclude_defaults: bool = False,
+                exclude_none: bool = False,
+            ) -> DictStrAny:
+                return super().dict(
+                    include=include,
+                    exclude=set(exclude or ()) | self._exclude_fields,
+                    by_alias=by_alias,
+                    skip_defaults=skip_defaults,
+                    exclude_unset=exclude_unset,
+                    exclude_defaults=exclude_defaults,
+                    exclude_none=exclude_none,
+                )
+
+
+class Reference(_BaseModel):
+    """Represents a reference to a model in the schema.
+
+    Tracks path, name, and relationships between models for resolution.
+    """
+
+    path: str
+    original_name: str = ""
+    name: str
+    duplicate_name: Optional[str] = None  # noqa: UP045
+    loaded: bool = True
+    source: Optional[ReferenceChild] = None  # noqa: UP045
+    children: list[ReferenceChild] = Field(default_factory=list)
+    _exclude_fields: ClassVar[set[str]] = {"children"}
+
+    @model_validator(mode="before")
+    def validate_original_name(cls, values: Any) -> Any:  # noqa: N805
+        """Assign name to original_name if original_name is empty."""
+        if not isinstance(values, dict):  # pragma: no cover
+            return values
+        original_name = values.get("original_name")
+        if original_name:
+            return values
+
+        values["original_name"] = values.get("name", original_name)
+        return values
+
+    if PYDANTIC_V2:
+        # TODO[pydantic]: The following keys were removed: `copy_on_model_validation`.
+        # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-config for more information.
+        model_config = ConfigDict(  # pyright: ignore[reportAssignmentType]
+            arbitrary_types_allowed=True,
+            ignored_types=(cached_property,),
+            revalidate_instances="never",
+        )
+    else:
+
+        class Config:
+            """Pydantic v1 configuration for Reference model."""
+
+            arbitrary_types_allowed = True
+            keep_untouched = (cached_property,)
+            copy_on_model_validation = False if version.parse(pydantic.VERSION) < version.parse("1.9.2") else "none"
+
+    @property
+    def short_name(self) -> str:
+        """Return the last component of the dotted name."""
+        return self.name.rsplit(".", 1)[-1]
+
+    def replace_children_references(self, new_reference: Reference) -> None:
+        """Replace all DataType children's reference with new_reference."""
+        for child in self.children[:]:
+            if _is_data_type(child):
+                child.replace_reference(new_reference)
+
+    def iter_data_model_children(self) -> Iterator[DataModel]:
+        """Yield all DataModel children."""
+        for child in self.children:
+            if _is_data_model(child):
+                yield child
+
+
+SINGULAR_NAME_SUFFIX: str = "Item"
+
+ID_PATTERN: Pattern[str] = re.compile(r"^#[^/].*")
+
+SPECIAL_PATH_MARKER: str = "#-datamodel-code-generator-#-"
+
+T = TypeVar("T")
+
+
+@contextmanager
+def context_variable(setter: Callable[[T], None], current_value: T, new_value: T) -> Generator[None, None, None]:
+    """Context manager that temporarily sets a value and restores it on exit."""
+    previous_value: T = current_value
+    setter(new_value)
+    try:
+        yield
+    finally:
+        setter(previous_value)
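+
+# A hedged usage sketch (names assumed, not part of this module): the helper
+# swaps a value in through its setter and restores the old one on exit.
+#
+#     _state = {"root": "old"}
+#     with context_variable(lambda v: _state.update(root=v), _state["root"], "new"):
+#         assert _state["root"] == "new"
+#     assert _state["root"] == "old"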
+
+
+class FieldNameResolver:
+    """Converts schema field names to valid Python identifiers."""
+
+    def __init__(  # noqa: PLR0913, PLR0917
+        self,
+        aliases: Mapping[str, str] | None = None,
+        snake_case_field: bool = False,  # noqa: FBT001, FBT002
+        empty_field_name: str | None = None,
+        original_delimiter: str | None = None,
+        special_field_name_prefix: str | None = None,
+        remove_special_field_name_prefix: bool = False,  # noqa: FBT001, FBT002
+        capitalise_enum_members: bool = False,  # noqa: FBT001, FBT002
+        no_alias: bool = False,  # noqa: FBT001, FBT002
+    ) -> None:
+        """Initialize field name resolver with transformation options."""
+        self.aliases: Mapping[str, str] = {} if aliases is None else {**aliases}
+        self.empty_field_name: str = empty_field_name or "_"
+        self.snake_case_field = snake_case_field
+        self.original_delimiter: str | None = original_delimiter
+        self.special_field_name_prefix: str | None = (
+            "field" if special_field_name_prefix is None else special_field_name_prefix
+        )
+        self.remove_special_field_name_prefix: bool = remove_special_field_name_prefix
+        self.capitalise_enum_members: bool = capitalise_enum_members
+        self.no_alias = no_alias
+
+    @classmethod
+    def _validate_field_name(cls, field_name: str) -> bool:  # noqa: ARG003
+        """Check if a field name is valid. Subclasses may override."""
+        return True
+
+    def get_valid_name(  # noqa: PLR0912
+        self,
+        name: str,
+        excludes: set[str] | None = None,
+        ignore_snake_case_field: bool = False,  # noqa: FBT001, FBT002
+        upper_camel: bool = False,  # noqa: FBT001, FBT002
+    ) -> str:
+        """Convert a name to a valid Python identifier."""
+        if not name:
+            name = self.empty_field_name
+        if name[0] == "#":
+            name = name[1:] or self.empty_field_name
+
+        if self.snake_case_field and not ignore_snake_case_field and self.original_delimiter is not None:
+            name = snake_to_upper_camel(name, delimiter=self.original_delimiter)
+
+        name = re.sub(r"[¹²³⁴⁵⁶⁷⁸⁹]|\W", "_", name)
+        if name[0].isnumeric():
+            name = f"{self.special_field_name_prefix}_{name}"
+
+        # Avoid field names that begin with an underscore, since pydantic
+        # treats such fields as private
+        while name.startswith("_"):
+            if self.remove_special_field_name_prefix:
+                name = name[1:]
+            else:
+                name = f"{self.special_field_name_prefix}{name}"
+                break
+        if self.capitalise_enum_members or (self.snake_case_field and not ignore_snake_case_field):
+            name = camel_to_snake(name)
+        count = 1
+        if iskeyword(name) or not self._validate_field_name(name):
+            name += "_"
+        if upper_camel:
+            new_name = snake_to_upper_camel(name)
+        elif self.capitalise_enum_members:
+            new_name = name.upper()
+        else:
+            new_name = name
+        while (
+            not new_name.isidentifier()
+            or iskeyword(new_name)
+            or (excludes and new_name in excludes)
+            or not self._validate_field_name(new_name)
+        ):
+            new_name = f"{name}{count}" if upper_camel else f"{name}_{count}"
+            count += 1
+        return new_name
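+
+    # Rough behaviour with default options (hedged sketch; outputs assumed):
+    #
+    #     resolver = FieldNameResolver()
+    #     resolver.get_valid_name("user-name")  # -> "user_name"  (non-word chars)
+    #     resolver.get_valid_name("class")      # -> "class_"     (Python keyword)
+    #     resolver.get_valid_name("1st")        # -> "field_1st"  (leading digit)
+    #     resolver.get_valid_name("_secret")    # -> "field_secret" (no leading "_")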
+
+    def get_valid_field_name_and_alias(
+        self,
+        field_name: str,
+        excludes: set[str] | None = None,
+        path: list[str] | None = None,
+        class_name: str | None = None,
+    ) -> tuple[str, str | None]:
+        """Get valid field name and original alias if different.
+
+        Supports hierarchical alias resolution with the following priority:
+        1. Scoped aliases (ClassName.field_name) - class-level specificity
+        2. Flat aliases (field_name) - applies to all occurrences
+
+        Args:
+            field_name: The original field name from the schema.
+            excludes: Set of names to avoid when generating valid names.
+            path: Unused, kept for backward compatibility.
+            class_name: Optional class name for scoped alias resolution.
+        """
+        del path
+        if class_name:
+            scoped_key = f"{class_name}.{field_name}"
+            if scoped_key in self.aliases:
+                return self.aliases[scoped_key], field_name
+
+        if field_name in self.aliases:
+            return self.aliases[field_name], field_name
+
+        valid_name = self.get_valid_name(field_name, excludes=excludes)
+        return (
+            valid_name,
+            None if self.no_alias or field_name == valid_name else field_name,
+        )
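+
+    # Scoped aliases win over flat ones; a hedged example with assumed names:
+    #
+    #     r = FieldNameResolver(aliases={"User.id": "user_id", "id": "id_"})
+    #     r.get_valid_field_name_and_alias("id", class_name="User")
+    #     # -> ("user_id", "id")
+    #     r.get_valid_field_name_and_alias("id")
+    #     # -> ("id_", "id")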
+
+
+class PydanticFieldNameResolver(FieldNameResolver):
+    """Field name resolver that avoids Pydantic reserved names."""
+
+    @classmethod
+    def _validate_field_name(cls, field_name: str) -> bool:
+        """Check field name doesn't conflict with BaseModel attributes."""
+        # TODO: Support Pydantic V2
+        return not hasattr(BaseModel, field_name)
+
+
+class EnumFieldNameResolver(FieldNameResolver):
+    """Field name resolver for enum members with special handling for reserved names.
+
+    When using --use-subclass-enum, enums inherit from types like str or int.
+    Member names that conflict with methods of these types cause type checker errors.
+    This class detects and handles such conflicts by adding underscore suffixes.
+
+    The _BUILTIN_TYPE_ATTRIBUTES set is intentionally static (not using hasattr)
+    to avoid runtime Python version differences affecting code generation.
+    Based on Python 3.8-3.14 method names (union of all versions for safety).
+    Note: 'mro' is handled explicitly in get_valid_name for backward compatibility.
+    """
+
+    _BUILTIN_TYPE_ATTRIBUTES: ClassVar[frozenset[str]] = frozenset({
+        "as_integer_ratio",
+        "bit_count",
+        "bit_length",
+        "capitalize",
+        "casefold",
+        "center",
+        "conjugate",
+        "count",
+        "decode",
+        "denominator",
+        "encode",
+        "endswith",
+        "expandtabs",
+        "find",
+        "format",
+        "format_map",
+        "from_bytes",
+        "from_number",
+        "fromhex",
+        "hex",
+        "imag",
+        "index",
+        "isalnum",
+        "isalpha",
+        "isascii",
+        "isdecimal",
+        "isdigit",
+        "isidentifier",
+        "islower",
+        "isnumeric",
+        "isprintable",
+        "isspace",
+        "istitle",
+        "isupper",
+        "is_integer",
+        "join",
+        "ljust",
+        "lower",
+        "lstrip",
+        "maketrans",
+        "numerator",
+        "partition",
+        "real",
+        "removeprefix",
+        "removesuffix",
+        "replace",
+        "rfind",
+        "rindex",
+        "rjust",
+        "rpartition",
+        "rsplit",
+        "rstrip",
+        "split",
+        "splitlines",
+        "startswith",
+        "strip",
+        "swapcase",
+        "title",
+        "to_bytes",
+        "translate",
+        "upper",
+        "zfill",
+    })
+
+    @classmethod
+    def _validate_field_name(cls, field_name: str) -> bool:
+        """Check field name doesn't conflict with subclass enum base type attributes.
+
+        When using --use-subclass-enum, enums inherit from types like str or int.
+        Member names that conflict with methods of these types (e.g., 'count' for str)
+        cause type checker errors. This method detects such conflicts.
+        """
+        return field_name not in cls._BUILTIN_TYPE_ATTRIBUTES
+
+    def get_valid_name(
+        self,
+        name: str,
+        excludes: set[str] | None = None,
+        ignore_snake_case_field: bool = False,  # noqa: FBT001, FBT002
+        upper_camel: bool = False,  # noqa: FBT001, FBT002
+    ) -> str:
+        """Convert name to valid enum member, handling reserved names."""
+        return super().get_valid_name(
+            name="mro_" if name == "mro" else name,
+            excludes={"mro"} | (excludes or set()),
+            ignore_snake_case_field=ignore_snake_case_field,
+            upper_camel=upper_camel,
+        )
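+
+    # Hedged sketch: member names shadowing str/int methods gain a suffix.
+    #
+    #     r = EnumFieldNameResolver()
+    #     r.get_valid_name("count")  # -> "count_" (conflicts with str.count)
+    #     r.get_valid_name("mro")    # -> "mro_"   (handled explicitly above)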
+
+
+class ModelType(Enum):
+    """Type of model for field name resolution strategy."""
+
+    PYDANTIC = auto()
+    ENUM = auto()
+    CLASS = auto()
+
+
+DEFAULT_FIELD_NAME_RESOLVERS: dict[ModelType, type[FieldNameResolver]] = {
+    ModelType.ENUM: EnumFieldNameResolver,
+    ModelType.PYDANTIC: PydanticFieldNameResolver,
+    ModelType.CLASS: FieldNameResolver,
+}
+
+
+class ClassName(NamedTuple):
+    """A class name with optional duplicate name for disambiguation."""
+
+    name: str
+    duplicate_name: str | None
+
+
+def get_relative_path(base_path: PurePath, target_path: PurePath) -> PurePath:
+    """Calculate relative path from base to target."""
+    if base_path == target_path:
+        return Path()
+    if not target_path.is_absolute():
+        return target_path
+    parent_count: int = 0
+    children: list[str] = []
+    for base_part, target_part in zip_longest(base_path.parts, target_path.parts):
+        if base_part == target_part and not parent_count:
+            continue
+        if base_part or not target_part:
+            parent_count += 1
+        if target_part:
+            children.append(target_part)
+    return Path(*[".." for _ in range(parent_count)], *children)
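+
+# Hedged example with assumed POSIX-style paths:
+#
+#     get_relative_path(PurePath("/a/b"), PurePath("/a/c/d.json"))
+#     # -> Path("../c/d.json")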
+
+
+class ModelResolver:  # noqa: PLR0904
+    """Manages model references, class names, and field name resolution.
+
+    Central registry for all model references during parsing, handling
+    name uniqueness, path resolution, and field name transformations.
+    """
+
+    def __init__(  # noqa: PLR0913, PLR0917
+        self,
+        exclude_names: set[str] | None = None,
+        duplicate_name_suffix: str | None = None,
+        base_url: str | None = None,
+        singular_name_suffix: str | None = None,
+        aliases: Mapping[str, str] | None = None,
+        snake_case_field: bool = False,  # noqa: FBT001, FBT002
+        empty_field_name: str | None = None,
+        custom_class_name_generator: Callable[[str], str] | None = None,
+        base_path: Path | None = None,
+        field_name_resolver_classes: dict[ModelType, type[FieldNameResolver]] | None = None,
+        original_field_name_delimiter: str | None = None,
+        special_field_name_prefix: str | None = None,
+        remove_special_field_name_prefix: bool = False,  # noqa: FBT001, FBT002
+        capitalise_enum_members: bool = False,  # noqa: FBT001, FBT002
+        no_alias: bool = False,  # noqa: FBT001, FBT002
+        remove_suffix_number: bool = False,  # noqa: FBT001, FBT002
+        parent_scoped_naming: bool = False,  # noqa: FBT001, FBT002
+        treat_dot_as_module: bool = False,  # noqa: FBT001, FBT002
+    ) -> None:
+        """Initialize model resolver with naming and resolution options."""
+        self.references: dict[str, Reference] = {}
+        self._current_root: Sequence[str] = []
+        self._root_id: str | None = None
+        self._root_id_base_path: str | None = None
+        self.ids: defaultdict[str, dict[str, str]] = defaultdict(dict)
+        self.after_load_files: set[str] = set()
+        self.exclude_names: set[str] = exclude_names or set()
+        self.duplicate_name_suffix: str | None = duplicate_name_suffix
+        self._base_url: str | None = base_url
+        self.singular_name_suffix: str = (
+            singular_name_suffix if isinstance(singular_name_suffix, str) else SINGULAR_NAME_SUFFIX
+        )
+        merged_field_name_resolver_classes = DEFAULT_FIELD_NAME_RESOLVERS.copy()
+        if field_name_resolver_classes:  # pragma: no cover
+            merged_field_name_resolver_classes.update(field_name_resolver_classes)
+        self.field_name_resolvers: dict[ModelType, FieldNameResolver] = {
+            k: v(
+                aliases=aliases,
+                snake_case_field=snake_case_field,
+                empty_field_name=empty_field_name,
+                original_delimiter=original_field_name_delimiter,
+                special_field_name_prefix=special_field_name_prefix,
+                remove_special_field_name_prefix=remove_special_field_name_prefix,
+                capitalise_enum_members=capitalise_enum_members if k == ModelType.ENUM else False,
+                no_alias=no_alias,
+            )
+            for k, v in merged_field_name_resolver_classes.items()
+        }
+        self.class_name_generator = custom_class_name_generator or self.default_class_name_generator
+        self._base_path: Path = base_path or Path.cwd()
+        self._current_base_path: Path | None = self._base_path
+        self.remove_suffix_number: bool = remove_suffix_number
+        self.parent_scoped_naming = parent_scoped_naming
+        self.treat_dot_as_module = treat_dot_as_module
+
+    @property
+    def current_base_path(self) -> Path | None:
+        """Return the current base path for file resolution."""
+        return self._current_base_path
+
+    def set_current_base_path(self, base_path: Path | None) -> None:
+        """Set the current base path for file resolution."""
+        self._current_base_path = base_path
+
+    @property
+    def base_url(self) -> str | None:
+        """Return the base URL for reference resolution."""
+        return self._base_url
+
+    def set_base_url(self, base_url: str | None) -> None:
+        """Set the base URL for reference resolution."""
+        self._base_url = base_url
+
+    @contextmanager
+    def current_base_path_context(self, base_path: Path | None) -> Generator[None, None, None]:
+        """Temporarily set the current base path within a context."""
+        if base_path:
+            base_path = (self._base_path / base_path).resolve()
+        with context_variable(self.set_current_base_path, self.current_base_path, base_path):
+            yield
+
+    @contextmanager
+    def base_url_context(self, base_url: str | None) -> Generator[None, None, None]:
+        """Temporarily set the base URL within a context.
+
+        Only sets the base_url if:
+        - the new value is actually a URL (http://, https://, or file://), or
+        - _base_url was already set (switching between URLs).
+        This preserves backward compatibility for local file parsing, where
+        this method was previously a no-op.
+        """
+        if self._base_url or (base_url and is_url(base_url)):
+            with context_variable(self.set_base_url, self.base_url, base_url):
+                yield
+        else:
+            yield
+
+    @property
+    def current_root(self) -> Sequence[str]:
+        """Return the current root path components."""
+        return self._current_root
+
+    def set_current_root(self, current_root: Sequence[str]) -> None:
+        """Set the current root path components."""
+        self._current_root = current_root
+
+    @contextmanager
+    def current_root_context(self, current_root: Sequence[str]) -> Generator[None, None, None]:
+        """Temporarily set the current root path within a context."""
+        with context_variable(self.set_current_root, self.current_root, current_root):
+            yield
+
+    @property
+    def root_id(self) -> str | None:
+        """Return the root identifier for the current schema."""
+        return self._root_id
+
+    @property
+    def root_id_base_path(self) -> str | None:
+        """Return the base path component of the root identifier."""
+        return self._root_id_base_path
+
+    def set_root_id(self, root_id: str | None) -> None:
+        """Set the root identifier and extract its base path."""
+        if root_id and "/" in root_id:
+            self._root_id_base_path = root_id.rsplit("/", 1)[0]
+        else:
+            self._root_id_base_path = None
+
+        self._root_id = root_id
+
+    def add_id(self, id_: str, path: Sequence[str]) -> None:
+        """Register an identifier mapping to a resolved reference path."""
+        self.ids["/".join(self.current_root)][id_] = self.resolve_ref(path)
+
+    def resolve_ref(self, path: Sequence[str] | str) -> str:  # noqa: PLR0911, PLR0912, PLR0914
+        """Resolve a reference path to its canonical form."""
+        joined_path = path if isinstance(path, str) else self.join_path(path)
+        if joined_path == "#":
+            return f"{'/'.join(self.current_root)}#"
+        if self.current_base_path and not self.base_url and joined_path[0] != "#" and not is_url(joined_path):
+            # resolve local file path
+            file_path, fragment = joined_path.split("#", 1) if "#" in joined_path else (joined_path, "")
+            resolved_file_path = Path(self.current_base_path, file_path).resolve()
+            joined_path = get_relative_path(self._base_path, resolved_file_path).as_posix()
+            if fragment:
+                joined_path += f"#{fragment}"
+        if ID_PATTERN.match(joined_path) and SPECIAL_PATH_MARKER not in joined_path:
+            id_scope = "/".join(self.current_root)
+            scoped_ids = self.ids[id_scope]
+            ref: str | None = scoped_ids.get(joined_path)
+            if ref is None:
+                msg = (
+                    f"Unresolved $id reference '{joined_path}' in scope '{id_scope or '<root>'}'. "
+                    f"Known $id values: {', '.join(sorted(scoped_ids)) or '<none>'}"
+                )
+                raise Error(msg)
+        else:
+            if "#" not in joined_path:
+                joined_path += "#"
+            elif joined_path[0] == "#" and self.current_root:
+                joined_path = f"{'/'.join(self.current_root)}{joined_path}"
+
+            file_path, fragment = joined_path.split("#", 1)
+            ref = f"{file_path}#{fragment}"
+            if (
+                self.root_id_base_path
+                and not self.base_url
+                and not (is_url(joined_path) or Path(self._base_path, file_path).is_file())
+            ):
+                ref = f"{self.root_id_base_path}/{ref}"
+
+        if is_url(ref):
+            file_part, path_part = ref.split("#", 1)
+            id_scope = "/".join(self.current_root)
+            scoped_ids = self.ids[id_scope]
+            if file_part in scoped_ids:
+                mapped_ref = scoped_ids[file_part]
+                if path_part:
+                    mapped_base, mapped_fragment = mapped_ref.split("#", 1) if "#" in mapped_ref else (mapped_ref, "")
+                    combined_fragment = f"{mapped_fragment.rstrip('/')}/{path_part.lstrip('/')}"
+                    return f"{mapped_base}#{combined_fragment}"
+                return mapped_ref
+
+        if self.base_url:
+            from .http import join_url  # noqa: PLC0415
+
+            effective_base = self.root_id or self.base_url
+            joined_url = join_url(effective_base, ref)
+            if "#" in joined_url:
+                return joined_url
+            return f"{joined_url}#"
+
+        if is_url(ref):
+            file_part, path_part = ref.split("#", 1)
+            if file_part == self.root_id:
+                return f"{'/'.join(self.current_root)}#{path_part}"
+            target_url: ParseResult = urlparse(file_part)
+            if not (self.root_id and self.current_base_path):
+                return ref
+            root_id_url: ParseResult = urlparse(self.root_id)
+            if (target_url.scheme, target_url.netloc) == (
+                root_id_url.scheme,
+                root_id_url.netloc,
+            ):  # pragma: no cover
+                target_url_path = Path(target_url.path)
+                target_path = (
+                    self.current_base_path
+                    / get_relative_path(Path(root_id_url.path).parent, target_url_path.parent)
+                    / target_url_path.name
+                )
+                if target_path.exists():
+                    return f"{target_path.resolve().relative_to(self._base_path)}#{path_part}"
+
+        return ref
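+
+    # A minimal sketch (assumed inputs): a local fragment is qualified with
+    # the current root, so inside pets.json:
+    #
+    #     resolver = ModelResolver()
+    #     with resolver.current_root_context(["pets.json"]):
+    #         resolver.resolve_ref("#/definitions/Pet")
+    #     # -> "pets.json#/definitions/Pet"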
+
+    def is_after_load(self, ref: str) -> bool:
+        """Check if a reference points to a file loaded after the current one."""
+        if is_url(ref) or not self.current_base_path:
+            return False
+        file_part, *_ = ref.split("#", 1)
+        absolute_path = Path(self._base_path, file_part).resolve().as_posix()
+        if self.is_external_root_ref(ref) or self.is_external_ref(ref):
+            return absolute_path in self.after_load_files
+        return False  # pragma: no cover
+
+    @staticmethod
+    def is_external_ref(ref: str) -> bool:
+        """Check if a reference points to an external file."""
+        return "#" in ref and ref[0] != "#"
+
+    @staticmethod
+    def is_external_root_ref(ref: str) -> bool:
+        """Check if a reference points to an external file root."""
+        return bool(ref) and ref[-1] == "#"
+
+    @staticmethod
+    def join_path(path: Sequence[str]) -> str:
+        """Join path components with slashes and normalize anchors."""
+        joined_path = "/".join(p for p in path if p).replace("/#", "#")
+        if "#" not in joined_path:
+            joined_path += "#"
+        return joined_path
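+
+    # Hedged sketch of the normalisation:
+    #
+    #     ModelResolver.join_path(["pets.json", "#", "definitions", "Pet"])
+    #     # -> "pets.json#/definitions/Pet"
+    #     ModelResolver.join_path(["pets.json"])
+    #     # -> "pets.json#"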
+
+    def add_ref(self, ref: str, resolved: bool = False) -> Reference:  # noqa: FBT001, FBT002
+        """Add a reference and return the Reference object."""
+        path = self.resolve_ref(ref) if not resolved else ref
+        if reference := self.references.get(path):
+            return reference
+        split_ref = ref.rsplit("/", 1)
+        if len(split_ref) == 1:
+            original_name = Path(split_ref[0].rstrip("#") if self.is_external_root_ref(path) else split_ref[0]).stem
+        else:
+            original_name = Path(split_ref[1].rstrip("#")).stem if self.is_external_root_ref(path) else split_ref[1]
+        name = self.get_class_name(original_name, unique=False).name
+        reference = Reference(
+            path=path,
+            original_name=original_name,
+            name=name,
+            loaded=False,
+        )
+
+        self.references[path] = reference
+        return reference
+
+    def _check_parent_scope_option(self, name: str, path: Sequence[str]) -> str:
+        if self.parent_scoped_naming:
+            parent_path = path[:-1]
+            while parent_path:
+                if parent_reference := self.references.get(self.join_path(parent_path)):
+                    return f"{parent_reference.name}_{name}"
+                parent_path = parent_path[:-1]
+        return name
+
+    def add(  # noqa: PLR0913
+        self,
+        path: Sequence[str],
+        original_name: str,
+        *,
+        class_name: bool = False,
+        singular_name: bool = False,
+        unique: bool = True,
+        singular_name_suffix: str | None = None,
+        loaded: bool = False,
+    ) -> Reference:
+        """Add or update a model reference with the given path and name."""
+        joined_path = self.join_path(path)
+        reference: Reference | None = self.references.get(joined_path)
+        if reference:
+            if loaded and not reference.loaded:
+                reference.loaded = True
+            if not original_name or original_name in {reference.original_name, reference.name}:
+                return reference
+        name = original_name
+        duplicate_name: str | None = None
+        if class_name:
+            name = self._check_parent_scope_option(name, path)
+            name, duplicate_name = self.get_class_name(
+                name=name,
+                unique=unique,
+                reserved_name=reference.name if reference else None,
+                singular_name=singular_name,
+                singular_name_suffix=singular_name_suffix,
+            )
+        else:
+            # TODO: create a validator for module name
+            name = self.get_valid_field_name(name, model_type=ModelType.CLASS)
+            if singular_name:  # pragma: no cover
+                name = get_singular_name(name, singular_name_suffix or self.singular_name_suffix)
+            elif unique:  # pragma: no cover
+                unique_name = self._get_unique_name(name)
+                if unique_name != name:
+                    duplicate_name = name
+                name = unique_name
+        if reference:
+            reference.original_name = original_name
+            reference.name = name
+            reference.loaded = loaded
+            reference.duplicate_name = duplicate_name
+        else:
+            reference = Reference(
+                path=joined_path,
+                original_name=original_name,
+                name=name,
+                loaded=loaded,
+                duplicate_name=duplicate_name,
+            )
+            self.references[joined_path] = reference
+        return reference
+
+    def get(self, path: Sequence[str] | str) -> Reference | None:
+        """Get a reference by path, returning None if not found."""
+        return self.references.get(self.resolve_ref(path))
+
+    def delete(self, path: Sequence[str] | str) -> None:
+        """Delete a reference by path if it exists."""
+        resolved = self.resolve_ref(path)
+        if resolved in self.references:
+            del self.references[resolved]
+
+    def default_class_name_generator(self, name: str) -> str:
+        """Generate a valid class name from a string."""
+        # TODO: create a validator for class name
+        return self.field_name_resolvers[ModelType.CLASS].get_valid_name(
+            name, ignore_snake_case_field=True, upper_camel=True
+        )
+
+    def get_class_name(
+        self,
+        name: str,
+        unique: bool = True,  # noqa: FBT001, FBT002
+        reserved_name: str | None = None,
+        singular_name: bool = False,  # noqa: FBT001, FBT002
+        singular_name_suffix: str | None = None,
+    ) -> ClassName:
+        """Generate a unique class name with optional singularization."""
+        if "." in name:
+            split_name = name.split(".")
+            prefix = ".".join(
+                # TODO: create a validator for class name
+                self.field_name_resolvers[ModelType.CLASS].get_valid_name(n, ignore_snake_case_field=True)
+                for n in split_name[:-1]
+            )
+            prefix += "."
+            class_name = split_name[-1]
+        else:
+            prefix = ""
+            class_name = name
+
+        class_name = self.class_name_generator(class_name)
+
+        if singular_name:
+            class_name = get_singular_name(class_name, singular_name_suffix or self.singular_name_suffix)
+        duplicate_name: str | None = None
+        if unique:
+            if reserved_name == class_name:
+                return ClassName(name=class_name, duplicate_name=duplicate_name)
+
+            unique_name = self._get_unique_name(class_name, camel=True)
+            if unique_name != class_name:
+                duplicate_name = class_name
+            class_name = unique_name
+        return ClassName(name=f"{prefix}{class_name}", duplicate_name=duplicate_name)
+
+    def _get_unique_name(self, name: str, camel: bool = False) -> str:  # noqa: FBT001, FBT002
+        unique_name: str = name
+        count: int = 0 if self.remove_suffix_number else 1
+        reference_names = {r.name for r in self.references.values()} | self.exclude_names
+        while unique_name in reference_names:
+            if self.duplicate_name_suffix:
+                name_parts: list[str | int] = [
+                    name,
+                    self.duplicate_name_suffix,
+                    count - 1,
+                ]
+            else:
+                name_parts = [name, count]
+            delimiter = "" if camel else "_"
+            unique_name = delimiter.join(str(p) for p in name_parts if p) if count else name
+            count += 1
+        return unique_name
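+
+    # Hedged sketch (assuming "Pet" and "pet" are already registered):
+    # collisions get numeric suffixes, joined bare for camel-case class names
+    # and with "_" otherwise.
+    #
+    #     resolver._get_unique_name("Pet", camel=True)  # -> "Pet1"
+    #     resolver._get_unique_name("pet")              # -> "pet_1"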
+
+    @classmethod
+    def validate_name(cls, name: str) -> bool:
+        """Check if a name is a valid Python identifier."""
+        return name.isidentifier() and not iskeyword(name)
+
+    def get_valid_field_name(
+        self,
+        name: str,
+        excludes: set[str] | None = None,
+        model_type: ModelType = ModelType.PYDANTIC,
+    ) -> str:
+        """Get a valid field name for the specified model type."""
+        return self.field_name_resolvers[model_type].get_valid_name(name, excludes)
+
+    def get_valid_field_name_and_alias(
+        self,
+        field_name: str,
+        excludes: set[str] | None = None,
+        model_type: ModelType = ModelType.PYDANTIC,
+        path: list[str] | None = None,
+        class_name: str | None = None,
+    ) -> tuple[str, str | None]:
+        """Get a valid field name and alias for the specified model type.
+
+        Args:
+            field_name: The original field name from the schema.
+            excludes: Set of names to avoid when generating valid names.
+            model_type: The type of model (PYDANTIC, ENUM, or CLASS).
+            path: Unused, kept for backward compatibility.
+            class_name: Optional class name for scoped alias resolution.
+
+        Returns:
+            A tuple of (valid_field_name, alias_or_none).
+        """
+        del path
+        return self.field_name_resolvers[model_type].get_valid_field_name_and_alias(
+            field_name, excludes, class_name=class_name
+        )
+
+
+def _get_inflect_engine() -> inflect.engine:
+    """Get or create the inflect engine lazily."""
+    global _inflect_engine  # noqa: PLW0603
+    if _inflect_engine is None:
+        import inflect  # noqa: PLC0415
+
+        _inflect_engine = inflect.engine()
+    return _inflect_engine
+
+
+_inflect_engine: inflect.engine | None = None
+
+
+@lru_cache
+def get_singular_name(name: str, suffix: str = SINGULAR_NAME_SUFFIX) -> str:
+    """Convert a plural name to singular form."""
+    singular_name = _get_inflect_engine().singular_noun(cast("inflect.Word", name))
+    if singular_name is False:
+        singular_name = f"{name}{suffix}"
+    return singular_name  # pyright: ignore[reportReturnType]
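+
+# Hedged examples (inflect's output is assumed):
+#
+#     get_singular_name("pets")    # -> "pet"
+#     get_singular_name("person")  # not a recognised plural -> "personItem"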
+
+
+@lru_cache
+def snake_to_upper_camel(word: str, delimiter: str = "_") -> str:
+    """Convert snake_case or delimited string to UpperCamelCase."""
+    prefix = ""
+    if word.startswith(delimiter):
+        prefix = "_"
+        word = word[1:]
+
+    return prefix + "".join(x[0].upper() + x[1:] for x in word.split(delimiter) if x)
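+
+# For example (outputs assumed):
+#
+#     snake_to_upper_camel("user_name")                 # -> "UserName"
+#     snake_to_upper_camel("user-name", delimiter="-")  # -> "UserName"
+#     snake_to_upper_camel("_private")                  # -> "_Private"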
+
+
+def is_url(ref: str) -> bool:
+    """Check if a reference string is a URL (HTTP, HTTPS, or file scheme)."""
+    return ref.startswith(("https://", "http://", "file://"))
diff -pruN 0.26.4-3/src/datamodel_code_generator/types.py 0.45.0-1/src/datamodel_code_generator/types.py
--- 0.26.4-3/src/datamodel_code_generator/types.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/types.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,733 @@
+"""Core type system for data model generation.
+
+Provides DataType for representing types with references and constraints,
+DataTypeManager as the abstract base for type mappings, and supporting
+utilities for handling unions, optionals, and type hints.
+"""
+
+from __future__ import annotations
+
+import re
+from abc import ABC, abstractmethod
+from enum import Enum, auto
+from functools import lru_cache
+from itertools import chain
+from re import Pattern
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    Callable,
+    ClassVar,
+    Optional,
+    Protocol,
+    TypeVar,
+    Union,
+    runtime_checkable,
+)
+
+import pydantic
+from packaging import version
+from pydantic import StrictBool, StrictInt, StrictStr, create_model
+from typing_extensions import TypeIs
+
+from datamodel_code_generator.format import (
+    DatetimeClassType,
+    PythonVersion,
+    PythonVersionMin,
+)
+from datamodel_code_generator.imports import (
+    IMPORT_ABC_MAPPING,
+    IMPORT_ABC_SEQUENCE,
+    IMPORT_ABC_SET,
+    IMPORT_ANY,
+    IMPORT_DICT,
+    IMPORT_FROZEN_SET,
+    IMPORT_LIST,
+    IMPORT_LITERAL,
+    IMPORT_MAPPING,
+    IMPORT_OPTIONAL,
+    IMPORT_SEQUENCE,
+    IMPORT_SET,
+    IMPORT_UNION,
+    Import,
+)
+from datamodel_code_generator.reference import Reference, _BaseModel
+from datamodel_code_generator.util import PYDANTIC_V2, ConfigDict
+
+T = TypeVar("T")
+SourceT = TypeVar("SourceT")
+
+OPTIONAL = "Optional"
+OPTIONAL_PREFIX = f"{OPTIONAL}["
+
+UNION = "Union"
+UNION_PREFIX = f"{UNION}["
+UNION_DELIMITER = ", "
+UNION_PATTERN: Pattern[str] = re.compile(r"\s*,\s*")
+UNION_OPERATOR_DELIMITER = " | "
+UNION_OPERATOR_PATTERN: Pattern[str] = re.compile(r"\s*\|\s*")
+NONE = "None"
+ANY = "Any"
+LITERAL = "Literal"
+SEQUENCE = "Sequence"
+FROZEN_SET = "FrozenSet"
+MAPPING = "Mapping"
+DICT = "Dict"
+SET = "Set"
+LIST = "List"
+STANDARD_DICT = "dict"
+STANDARD_LIST = "list"
+STANDARD_SET = "set"
+STR = "str"
+
+NOT_REQUIRED = "NotRequired"
+NOT_REQUIRED_PREFIX = f"{NOT_REQUIRED}["
+
+if TYPE_CHECKING:
+    import builtins
+    from collections.abc import Iterable, Iterator, Sequence
+
+    from pydantic_core import core_schema
+
+    from datamodel_code_generator.model.base import DataModelFieldBase
+
+if PYDANTIC_V2:
+    from pydantic import GetCoreSchemaHandler
+    from pydantic_core import core_schema
+
+
+class StrictTypes(Enum):
+    """Strict type options for generated models."""
+
+    str = "str"
+    bytes = "bytes"
+    int = "int"
+    float = "float"
+    bool = "bool"
+
+
+class UnionIntFloat:
+    """Pydantic-compatible type that accepts both int and float values."""
+
+    def __init__(self, value: float) -> None:
+        """Initialize with an int or float value."""
+        self.value: int | float = value
+
+    def __int__(self) -> int:
+        """Convert value to int."""
+        return int(self.value)
+
+    def __float__(self) -> float:
+        """Convert value to float."""
+        return float(self.value)
+
+    def __str__(self) -> str:
+        """Convert value to string."""
+        return str(self.value)
+
+    @classmethod
+    def __get_validators__(cls) -> Iterator[Callable[[Any], Any]]:  # noqa: PLW3201
+        """Return Pydantic v1 validators."""
+        yield cls.validate
+
+    @classmethod
+    def __get_pydantic_core_schema__(  # noqa: PLW3201
+        cls, _source_type: Any, _handler: GetCoreSchemaHandler
+    ) -> core_schema.CoreSchema:
+        """Return Pydantic v2 core schema."""
+        from_int_schema = core_schema.chain_schema([
+            core_schema.union_schema([core_schema.int_schema(), core_schema.float_schema()]),
+            core_schema.no_info_plain_validator_function(cls.validate),
+        ])
+
+        return core_schema.json_or_python_schema(
+            json_schema=from_int_schema,
+            python_schema=core_schema.union_schema([
+                # check if it's an instance first before doing any further work
+                core_schema.is_instance_schema(UnionIntFloat),
+                from_int_schema,
+            ]),
+            serialization=core_schema.plain_serializer_function_ser_schema(lambda instance: instance.value),
+        )
+
+    @classmethod
+    def validate(cls, v: Any) -> UnionIntFloat:
+        """Validate and convert value to UnionIntFloat."""
+        if isinstance(v, UnionIntFloat):
+            return v
+        if not isinstance(v, (int, float)):  # pragma: no cover
+            try:
+                int(v)
+                return cls(v)
+            except (TypeError, ValueError):
+                pass
+            try:
+                float(v)
+                return cls(v)
+            except (TypeError, ValueError):
+                pass
+
+            msg = f"{v} is not int or float"
+            raise TypeError(msg)
+        return cls(v)
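+
+    # Hedged usage sketch: the wrapper keeps the original numeric value while
+    # exposing int/float/str views.
+    #
+    #     v = UnionIntFloat(3.5)
+    #     int(v), float(v), str(v)  # -> (3, 3.5, "3.5")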
+
+
+def chain_as_tuple(*iterables: Iterable[T]) -> tuple[T, ...]:
+    """Chain multiple iterables and return as a tuple."""
+    return tuple(chain(*iterables))
+
+
+def _remove_none_from_union(type_: str, *, use_union_operator: bool) -> str:  # noqa: PLR0912
+    """Remove None from a Union type string, handling nested unions."""
+    if use_union_operator:
+        if " | " not in type_:
+            return type_
+        separator = "|"
+        inner_text = type_
+    else:
+        if not type_.startswith(UNION_PREFIX):
+            return type_
+        separator = ","
+        inner_text = type_[len(UNION_PREFIX) : -1]
+
+    parts = []
+    inner_count = 0
+    current_part = ""
+
+    # This counter tracks non-escaped round brackets while we are inside a
+    # constraint string expression. Once a part starting with `constr(` is
+    # found, we increment the counter for each non-escaped opening round
+    # bracket and decrement it for each non-escaped closing one.
+    in_constr = 0
+
+    # Parse union parts carefully to handle nested structures
+    for char in inner_text:
+        current_part += char
+        if char == "[" and in_constr == 0:
+            inner_count += 1
+        elif char == "]" and in_constr == 0:
+            inner_count -= 1
+        elif char == "(":
+            if current_part.strip().startswith("constr(") and current_part[-2] != "\\":
+                # non-escaped opening round bracket found inside constraint string expression
+                in_constr += 1
+        elif char == ")":
+            if in_constr > 0 and current_part[-2] != "\\":
+                # non-escaped closing round bracket found inside constraint string expression
+                in_constr -= 1
+        elif char == separator and inner_count == 0 and in_constr == 0:
+            part = current_part[:-1].strip()
+            if part != NONE:
+                # Process nested unions recursively
+                # only UNION_PREFIX might be nested but not union_operator
+                if not use_union_operator and part.startswith(UNION_PREFIX):
+                    part = _remove_none_from_union(part, use_union_operator=False)
+                parts.append(part)
+            current_part = ""
+
+    part = current_part.strip()
+    if current_part and part != NONE:
+        # only UNION_PREFIX might be nested but not union_operator
+        if not use_union_operator and part.startswith(UNION_PREFIX):
+            part = _remove_none_from_union(part, use_union_operator=False)
+        parts.append(part)
+
+    if not parts:
+        return NONE
+    if len(parts) == 1:
+        return parts[0]
+
+    if use_union_operator:
+        return UNION_OPERATOR_DELIMITER.join(parts)
+
+    return f"{UNION_PREFIX}{UNION_DELIMITER.join(parts)}]"
+
+
+@lru_cache
+def get_optional_type(type_: str, use_union_operator: bool) -> str:  # noqa: FBT001
+    """Wrap a type string in Optional or add | None suffix."""
+    type_ = _remove_none_from_union(type_, use_union_operator=use_union_operator)
+
+    if not type_ or type_ == NONE:
+        return NONE
+    if use_union_operator:
+        return f"{type_} | {NONE}"
+    return f"{OPTIONAL_PREFIX}{type_}]"
+
+
+def is_data_model_field(obj: object) -> TypeIs[DataModelFieldBase]:
+    """Check if an object is a DataModelFieldBase instance."""
+    from datamodel_code_generator.model.base import DataModelFieldBase  # noqa: PLC0415
+
+    return isinstance(obj, DataModelFieldBase)
+
+
+@runtime_checkable
+class Modular(Protocol):
+    """Protocol for objects with a module name property."""
+
+    @property
+    def module_name(self) -> str:
+        """Return the module name."""
+        raise NotImplementedError
+
+
+@runtime_checkable
+class Nullable(Protocol):
+    """Protocol for objects with a nullable property."""
+
+    @property
+    def nullable(self) -> bool:
+        """Return whether the type is nullable."""
+        raise NotImplementedError
+
+
+class DataType(_BaseModel):
+    """Represents a type in generated code with imports and references."""
+
+    if PYDANTIC_V2:
+        # TODO[pydantic]: The following keys were removed: `copy_on_model_validation`.
+        # Check https://docs.pydantic.dev/dev-v2/migration/#changes-to-config for more information.
+        model_config = ConfigDict(  # pyright: ignore[reportAssignmentType]
+            extra="forbid",
+            revalidate_instances="never",
+        )
+    else:
+        if not TYPE_CHECKING:
+
+            @classmethod
+            def model_rebuild(
+                cls,
+                *,
+                _types_namespace: dict[str, type] | None = None,
+            ) -> None:
+                """Update forward references for Pydantic v1."""
+                localns = _types_namespace or {}
+                cls.update_forward_refs(**localns)
+
+        class Config:
+            """Pydantic v1 model configuration."""
+
+            extra = "forbid"
+            copy_on_model_validation = False if version.parse(pydantic.VERSION) < version.parse("1.9.2") else "none"
+
+    type: Optional[str] = None  # noqa: UP045
+    reference: Optional[Reference] = None  # noqa: UP045
+    data_types: list[DataType] = []  # noqa: RUF012
+    is_func: bool = False
+    kwargs: Optional[dict[str, Any]] = None  # noqa: UP045
+    import_: Optional[Import] = None  # noqa: UP045
+    python_version: PythonVersion = PythonVersionMin
+    is_optional: bool = False
+    is_dict: bool = False
+    is_list: bool = False
+    is_set: bool = False
+    is_custom_type: bool = False
+    literals: list[Union[StrictBool, StrictInt, StrictStr]] = []  # noqa: RUF012, UP007
+    enum_member_literals: list[tuple[str, str]] = []  # noqa: RUF012  # [(EnumClassName, member_name), ...]
+    use_standard_collections: bool = False
+    use_generic_container: bool = False
+    use_union_operator: bool = False
+    alias: Optional[str] = None  # noqa: UP045
+    parent: Union[DataModelFieldBase, DataType, None] = None  # noqa: UP007
+    children: list[DataType] = []  # noqa: RUF012
+    strict: bool = False
+    dict_key: Optional[DataType] = None  # noqa: UP045
+    treat_dot_as_module: bool = False
+    use_serialize_as_any: bool = False
+
+    _exclude_fields: ClassVar[set[str]] = {"parent", "children"}
+    _pass_fields: ClassVar[set[str]] = {"parent", "children", "data_types", "reference"}
+
+    @classmethod
+    def from_import(  # noqa: PLR0913
+        cls: builtins.type[DataTypeT],
+        import_: Import,
+        *,
+        is_optional: bool = False,
+        is_dict: bool = False,
+        is_list: bool = False,
+        is_set: bool = False,
+        is_custom_type: bool = False,
+        strict: bool = False,
+        kwargs: dict[str, Any] | None = None,
+    ) -> DataTypeT:
+        """Create a DataType from an Import object."""
+        return cls(
+            type=import_.import_,
+            import_=import_,
+            is_optional=is_optional,
+            is_dict=is_dict,
+            is_list=is_list,
+            is_set=is_set,
+            is_func=bool(kwargs),
+            is_custom_type=is_custom_type,
+            strict=strict,
+            kwargs=kwargs,
+        )
+
+    @property
+    def unresolved_types(self) -> frozenset[str]:
+        """Return set of unresolved type reference paths."""
+        return frozenset(
+            {t.reference.path for data_types in self.data_types for t in data_types.all_data_types if t.reference}
+            | ({self.reference.path} if self.reference else set())
+        )
+
+    def replace_reference(self, reference: Reference | None) -> None:
+        """Replace this DataType's reference with a new one."""
+        if not self.reference:  # pragma: no cover
+            msg = f"`{self.__class__.__name__}.replace_reference()` can't be called when `reference` field is empty."
+            raise Exception(msg)  # noqa: TRY002
+        self_id = id(self)
+        self.reference.children = [c for c in self.reference.children if id(c) != self_id]
+        self.reference = reference
+        if reference:
+            reference.children.append(self)
+
+    def remove_reference(self) -> None:
+        """Remove the reference from this DataType."""
+        self.replace_reference(None)
+
+    def swap_with(self, new_data_type: DataType) -> None:
+        """Detach self and attach new_data_type to the same parent.
+
+        Replaces this DataType with new_data_type in the parent container.
+        Works with both field parents and nested DataType parents.
+        """
+        parent = self.parent
+        self.parent = None
+        if parent is not None:  # pragma: no cover
+            new_data_type.parent = parent
+            if is_data_model_field(parent):
+                parent.data_type = new_data_type
+            elif isinstance(parent, DataType):  # pragma: no cover
+                parent.data_types = [new_data_type if d is self else d for d in parent.data_types]
+
+    @property
+    def module_name(self) -> str | None:
+        """Return the module name from the reference source."""
+        if self.reference and isinstance(self.reference.source, Modular):
+            return self.reference.source.module_name
+        return None  # pragma: no cover
+
+    @property
+    def full_name(self) -> str:
+        """Return the fully qualified name including module."""
+        module_name = self.module_name
+        if module_name:
+            return f"{module_name}.{self.reference.short_name if self.reference else ''}"
+        return self.reference.short_name if self.reference else ""
+
+    @property
+    def all_data_types(self) -> Iterator[DataType]:
+        """Recursively yield all nested DataTypes including self."""
+        for data_type in self.data_types:
+            yield from data_type.all_data_types
+        yield self
+
+    def find_source(self, source_type: type[SourceT]) -> SourceT | None:
+        """Find the first reference source matching the given type from all nested data types."""
+        for data_type in self.all_data_types:  # pragma: no branch
+            if not data_type.reference:  # pragma: no cover
+                continue
+            source = data_type.reference.source
+            if isinstance(source, source_type):  # pragma: no cover
+                return source
+        return None  # pragma: no cover
+
+    @property
+    def all_imports(self) -> Iterator[Import]:
+        """Recursively yield all imports from nested DataTypes and self."""
+        for data_type in self.data_types:
+            yield from data_type.all_imports
+        yield from self.imports
+
+    @property
+    def imports(self) -> Iterator[Import]:
+        """Yield imports required by this DataType."""
+        # Add base import if exists
+        if self.import_:
+            yield self.import_
+
+        # Define required imports based on type features and conditions
+        imports: tuple[tuple[bool, Import], ...] = (
+            (self.is_optional and not self.use_union_operator, IMPORT_OPTIONAL),
+            (len(self.data_types) > 1 and not self.use_union_operator, IMPORT_UNION),
+            (bool(self.literals) or bool(self.enum_member_literals), IMPORT_LITERAL),
+        )
+
+        if self.use_generic_container:
+            if self.use_standard_collections:
+                imports = (
+                    *imports,
+                    (self.is_list, IMPORT_ABC_SEQUENCE),
+                    (self.is_set, IMPORT_ABC_SET),
+                    (self.is_dict, IMPORT_ABC_MAPPING),
+                )
+            else:
+                imports = (
+                    *imports,
+                    (self.is_list, IMPORT_SEQUENCE),
+                    (self.is_set, IMPORT_FROZEN_SET),
+                    (self.is_dict, IMPORT_MAPPING),
+                )
+        elif not self.use_standard_collections:
+            imports = (
+                *imports,
+                (self.is_list, IMPORT_LIST),
+                (self.is_set, IMPORT_SET),
+                (self.is_dict, IMPORT_DICT),
+            )
+
+        # Yield imports based on conditions
+        for field, import_ in imports:
+            if field and import_ != self.import_:
+                yield import_
+
+        # Propagate imports from any dict_key type
+        if self.dict_key:
+            yield from self.dict_key.imports
+
+    def __init__(self, **values: Any) -> None:
+        """Initialize DataType with validation and reference setup."""
+        if not TYPE_CHECKING:
+            super().__init__(**values)
+
+        for type_ in self.data_types:
+            if type_.type == ANY and type_.is_optional:
+                if any(t for t in self.data_types if t.type != ANY):  # pragma: no cover
+                    self.is_optional = True
+                    self.data_types = [t for t in self.data_types if not (t.type == ANY and t.is_optional)]
+                break  # pragma: no cover
+
+        for data_type in self.data_types:
+            if data_type.reference or data_type.data_types:
+                data_type.parent = self
+
+        if self.reference:
+            self.reference.children.append(self)
+
+    def _get_wrapped_reference_type_hint(self, type_: str) -> str:  # noqa: PLR6301
+        """Wrap reference type name if needed (override in subclasses, e.g., for SerializeAsAny).
+
+        Args:
+            type_: The reference type name (e.g., "User")
+
+        Returns:
+            The potentially wrapped type name
+        """
+        return type_
+
+    @property
+    def type_hint(self) -> str:  # noqa: PLR0912, PLR0915
+        """Generate the Python type hint string for this DataType."""
+        type_: str | None = self.alias or self.type
+        if not type_:
+            if self.is_union:
+                data_types: list[str] = []
+                for data_type in self.data_types:
+                    data_type_type = data_type.type_hint
+                    if not data_type_type or data_type_type in data_types:
+                        continue
+
+                    if data_type_type == NONE:
+                        self.is_optional = True
+                        continue
+
+                    non_optional_data_type_type = _remove_none_from_union(
+                        data_type_type, use_union_operator=self.use_union_operator
+                    )
+
+                    if non_optional_data_type_type != data_type_type:
+                        self.is_optional = True
+
+                    data_types.append(non_optional_data_type_type)
+                if not data_types:
+                    type_ = ANY
+                    self.import_ = self.import_ or IMPORT_ANY
+                elif len(data_types) == 1:
+                    type_ = data_types[0]
+                elif self.use_union_operator:
+                    type_ = UNION_OPERATOR_DELIMITER.join(data_types)
+                else:
+                    type_ = f"{UNION_PREFIX}{UNION_DELIMITER.join(data_types)}]"
+            elif len(self.data_types) == 1:
+                type_ = self.data_types[0].type_hint
+            elif self.enum_member_literals:
+                parts = [f"{enum_class}.{member}" for enum_class, member in self.enum_member_literals]
+                type_ = f"{LITERAL}[{', '.join(parts)}]"
+            elif self.literals:
+                type_ = f"{LITERAL}[{', '.join(repr(literal) for literal in self.literals)}]"
+            elif self.reference:
+                type_ = self.reference.short_name
+                type_ = self._get_wrapped_reference_type_hint(type_)
+            else:
+                # TODO support strict Any
+                type_ = ""
+        if self.reference:
+            source = self.reference.source
+            if isinstance(source, Nullable) and source.nullable:
+                self.is_optional = True
+        if self.is_list:
+            if self.use_generic_container:
+                list_ = SEQUENCE
+            elif self.use_standard_collections:
+                list_ = STANDARD_LIST
+            else:
+                list_ = LIST
+            type_ = f"{list_}[{type_}]" if type_ else list_
+        elif self.is_set:
+            if self.use_generic_container:
+                set_ = FROZEN_SET
+            elif self.use_standard_collections:
+                set_ = STANDARD_SET
+            else:
+                set_ = SET
+            type_ = f"{set_}[{type_}]" if type_ else set_
+        elif self.is_dict:
+            if self.use_generic_container:
+                dict_ = MAPPING
+            elif self.use_standard_collections:
+                dict_ = STANDARD_DICT
+            else:
+                dict_ = DICT
+            if self.dict_key or type_:
+                key = self.dict_key.type_hint if self.dict_key else STR
+                type_ = f"{dict_}[{key}, {type_ or ANY}]"
+            else:  # pragma: no cover
+                type_ = dict_
+        if self.is_optional and type_ != ANY:
+            return get_optional_type(type_, self.use_union_operator)
+        if self.is_func:
+            if self.kwargs:
+                kwargs: str = ", ".join(f"{k}={v}" for k, v in self.kwargs.items())
+                return f"{type_}({kwargs})"
+            return f"{type_}()"
+        return type_
+
+    @property
+    def is_union(self) -> bool:
+        """Return whether this DataType represents a union of multiple types."""
+        return len(self.data_types) > 1
+
+
+DataTypeT = TypeVar("DataTypeT", bound=DataType)
+
+
+class EmptyDataType(DataType):
+    """A DataType placeholder for empty or unresolved types."""
+
+
+class Types(Enum):
+    """Standard type identifiers for schema type mapping."""
+
+    integer = auto()
+    int32 = auto()
+    int64 = auto()
+    number = auto()
+    float = auto()
+    double = auto()
+    decimal = auto()
+    time = auto()
+    string = auto()
+    byte = auto()
+    binary = auto()
+    date = auto()
+    date_time = auto()
+    timedelta = auto()
+    password = auto()
+    path = auto()
+    email = auto()
+    uuid = auto()
+    uuid1 = auto()
+    uuid2 = auto()
+    uuid3 = auto()
+    uuid4 = auto()
+    uuid5 = auto()
+    uri = auto()
+    hostname = auto()
+    ipv4 = auto()
+    ipv4_network = auto()
+    ipv6 = auto()
+    ipv6_network = auto()
+    boolean = auto()
+    object = auto()
+    null = auto()
+    array = auto()
+    any = auto()
+
+
+class DataTypeManager(ABC):
+    """Abstract base class for managing type mappings in code generation.
+
+    Subclasses implement get_data_type() to map schema types to DataType objects.
+    """
+
+    def __init__(  # noqa: PLR0913, PLR0917
+        self,
+        python_version: PythonVersion = PythonVersionMin,
+        use_standard_collections: bool = False,  # noqa: FBT001, FBT002
+        use_generic_container_types: bool = False,  # noqa: FBT001, FBT002
+        strict_types: Sequence[StrictTypes] | None = None,
+        use_non_positive_negative_number_constrained_types: bool = False,  # noqa: FBT001, FBT002
+        use_decimal_for_multiple_of: bool = False,  # noqa: FBT001, FBT002
+        use_union_operator: bool = False,  # noqa: FBT001, FBT002
+        use_pendulum: bool = False,  # noqa: FBT001, FBT002
+        target_datetime_class: DatetimeClassType | None = None,
+        treat_dot_as_module: bool = False,  # noqa: FBT001, FBT002
+        use_serialize_as_any: bool = False,  # noqa: FBT001, FBT002
+    ) -> None:
+        """Initialize DataTypeManager with code generation options."""
+        self.python_version = python_version
+        self.use_standard_collections: bool = use_standard_collections
+        self.use_generic_container_types: bool = use_generic_container_types
+        self.strict_types: Sequence[StrictTypes] = strict_types or ()
+        self.use_non_positive_negative_number_constrained_types: bool = (
+            use_non_positive_negative_number_constrained_types
+        )
+        self.use_decimal_for_multiple_of: bool = use_decimal_for_multiple_of
+        self.use_union_operator: bool = use_union_operator
+        self.use_pendulum: bool = use_pendulum
+        self.target_datetime_class: DatetimeClassType | None = target_datetime_class
+        self.treat_dot_as_module: bool = treat_dot_as_module
+        self.use_serialize_as_any: bool = use_serialize_as_any
+
+        self.data_type: type[DataType] = create_model(
+            "ContextDataType",
+            python_version=(PythonVersion, python_version),
+            use_standard_collections=(bool, use_standard_collections),
+            use_generic_container=(bool, use_generic_container_types),
+            use_union_operator=(bool, use_union_operator),
+            treat_dot_as_module=(bool, treat_dot_as_module),
+            use_serialize_as_any=(bool, use_serialize_as_any),
+            __base__=DataType,
+        )
+
+    @abstractmethod
+    def get_data_type(self, types: Types, **kwargs: Any) -> DataType:
+        """Map a Types enum value to a DataType. Must be implemented by subclasses."""
+        raise NotImplementedError
+
+    def get_data_type_from_full_path(self, full_path: str, is_custom_type: bool) -> DataType:  # noqa: FBT001
+        """Create a DataType from a fully qualified Python path."""
+        return self.data_type.from_import(Import.from_full_path(full_path), is_custom_type=is_custom_type)
+
+    def get_data_type_from_value(self, value: Any) -> DataType:
+        """Infer a DataType from a Python value."""
+        type_: Types | None = None
+        if isinstance(value, str):
+            type_ = Types.string
+        elif isinstance(value, bool):
+            type_ = Types.boolean
+        elif isinstance(value, int):
+            type_ = Types.integer
+        elif isinstance(value, float):
+            type_ = Types.float
+        elif isinstance(value, dict):
+            return self.data_type.from_import(IMPORT_DICT)
+        elif isinstance(value, list):
+            return self.data_type.from_import(IMPORT_LIST)
+        else:
+            type_ = Types.any
+        return self.get_data_type(type_)
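
The union branch of `type_hint` above is the densest part of this hunk, and it condenses to a few rules. Below is a self-contained sketch (a simplification for illustration, not the package API; it skips `_remove_none_from_union`'s handling of members that are themselves already Optional): duplicate members are dropped, a `None` member marks the whole union Optional, and `use_union_operator` selects between the `X | Y` and `Union[X, Y]` spellings.

```python
from __future__ import annotations


def render_union(member_hints: list[str], *, use_union_operator: bool) -> str:
    """Sketch of the union rules: dedupe members, fold a None member into
    Optional-ness, then join with `X | Y` or `Union[X, Y]`."""
    members: list[str] = []
    is_optional = False
    for hint in member_hints:
        if hint == "None":  # a None member makes the union Optional
            is_optional = True
            continue
        if hint and hint not in members:
            members.append(hint)
    if not members:
        inner = "Any"
    elif len(members) == 1:
        inner = members[0]
    elif use_union_operator:
        inner = " | ".join(members)
    else:
        inner = f"Union[{', '.join(members)}]"
    if is_optional and inner != "Any":  # Any already subsumes None
        return f"{inner} | None" if use_union_operator else f"Optional[{inner}]"
    return inner


assert render_union(["str", "int", "None"], use_union_operator=False) == "Optional[Union[str, int]]"
assert render_union(["str", "int", "None"], use_union_operator=True) == "str | int | None"
```
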
diff -pruN 0.26.4-3/src/datamodel_code_generator/util.py 0.45.0-1/src/datamodel_code_generator/util.py
--- 0.26.4-3/src/datamodel_code_generator/util.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/util.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,155 @@
+"""Utility functions and Pydantic version compatibility helpers.
+
+Provides Pydantic version detection (PYDANTIC_V2), YAML/TOML loading,
+and version-compatible decorators (model_validator, field_validator).
+"""
+
+from __future__ import annotations
+
+import copy
+import re
+from functools import lru_cache
+from typing import TYPE_CHECKING, Any, Callable, Literal, TypeVar, overload
+
+import pydantic
+from packaging import version
+from pydantic import BaseModel as _BaseModel
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+PYDANTIC_VERSION = version.parse(pydantic.VERSION if isinstance(pydantic.VERSION, str) else str(pydantic.VERSION))
+
+PYDANTIC_V2: bool = version.parse("2.0b3") <= PYDANTIC_VERSION
+
+try:
+    from yaml import CSafeLoader as SafeLoader
+except ImportError:  # pragma: no cover
+    from yaml import SafeLoader
+
+try:
+    from tomllib import load as load_tomllib  # type: ignore[ignoreMissingImports]
+except ImportError:
+    from tomli import load as load_tomllib  # type: ignore[ignoreMissingImports]
+
+
+def load_toml(path: Path) -> dict[str, Any]:
+    """Load and parse a TOML file."""
+    with path.open("rb") as f:
+        return load_tomllib(f)
+
+
+SafeLoaderTemp = copy.deepcopy(SafeLoader)
+SafeLoaderTemp.yaml_constructors = copy.deepcopy(SafeLoader.yaml_constructors)
+SafeLoaderTemp.add_constructor(
+    "tag:yaml.org,2002:timestamp",
+    SafeLoaderTemp.yaml_constructors["tag:yaml.org,2002:str"],
+)
+SafeLoader = SafeLoaderTemp
+
+Model = TypeVar("Model", bound=_BaseModel)
+T = TypeVar("T")
+
+
+@overload
+def model_validator(
+    mode: Literal["before"],
+) -> (
+    Callable[[Callable[[type[Model], T], T]], Callable[[type[Model], T], T]]
+    | Callable[[Callable[[Model, T], T]], Callable[[Model, T], T]]
+): ...
+
+
+@overload
+def model_validator(
+    mode: Literal["after"],
+) -> (
+    Callable[[Callable[[type[Model], T], T]], Callable[[type[Model], T], T]]
+    | Callable[[Callable[[Model, T], T]], Callable[[Model, T], T]]
+    | Callable[[Callable[[Model], Model]], Callable[[Model], Model]]
+): ...
+
+
+@overload
+def model_validator() -> (
+    Callable[[Callable[[type[Model], T], T]], Callable[[type[Model], T], T]]
+    | Callable[[Callable[[Model, T], T]], Callable[[Model, T], T]]
+    | Callable[[Callable[[Model], Model]], Callable[[Model], Model]]
+): ...
+
+
+def model_validator(  # pyright: ignore[reportInconsistentOverload]
+    mode: Literal["before", "after"] = "after",
+) -> (
+    Callable[[Callable[[type[Model], T], T]], Callable[[type[Model], T], T]]
+    | Callable[[Callable[[Model, T], T]], Callable[[Model, T], T]]
+    | Callable[[Callable[[Model], Model]], Callable[[Model], Model]]
+):
+    """Decorate model validators for both Pydantic v1 and v2."""
+
+    @overload
+    def inner(method: Callable[[type[Model], T], T]) -> Callable[[type[Model], T], T]: ...
+
+    @overload
+    def inner(method: Callable[[Model, T], T]) -> Callable[[Model, T], T]: ...
+
+    @overload
+    def inner(method: Callable[[Model], Model]) -> Callable[[Model], Model]: ...
+
+    def inner(
+        method: Callable[[type[Model], T], T] | Callable[[Model, T], T] | Callable[[Model], Model],
+    ) -> Callable[[type[Model], T], T] | Callable[[Model, T], T] | Callable[[Model], Model]:
+        if PYDANTIC_V2:
+            from pydantic import model_validator as model_validator_v2  # noqa: PLC0415
+
+            if mode == "before":
+                return model_validator_v2(mode=mode)(classmethod(method))  # type: ignore[reportReturnType]
+            return model_validator_v2(mode=mode)(method)  # type: ignore[reportReturnType]
+        from pydantic import root_validator  # noqa: PLC0415
+
+        return root_validator(method, pre=mode == "before")  # pyright: ignore[reportCallIssue]
+
+    return inner
+
+
+def field_validator(
+    field_name: str,
+    *fields: str,
+    mode: Literal["before", "after"] = "after",
+) -> Callable[[Any], Callable[[BaseModel, Any], Any]]:
+    """Decorate field validators for both Pydantic v1 and v2."""
+
+    def inner(method: Callable[[Model, Any], Any]) -> Callable[[Model, Any], Any]:
+        if PYDANTIC_V2:
+            from pydantic import field_validator as field_validator_v2  # noqa: PLC0415
+
+            return field_validator_v2(field_name, *fields, mode=mode)(method)
+        from pydantic import validator  # noqa: PLC0415
+
+        return validator(field_name, *fields, pre=mode == "before")(method)  # pyright: ignore[reportReturnType]
+
+    return inner
+
+
+if PYDANTIC_V2:
+    from pydantic import ConfigDict
+else:
+    ConfigDict = dict
+
+
+class BaseModel(_BaseModel):
+    """Base Pydantic model with version-compatible configuration."""
+
+    if PYDANTIC_V2:
+        model_config = ConfigDict(strict=False)  # pyright: ignore[reportAssignmentType]
+
+
+_UNDER_SCORE_1: re.Pattern[str] = re.compile(r"([^_])([A-Z][a-z]+)")
+_UNDER_SCORE_2: re.Pattern[str] = re.compile(r"([a-z0-9])([A-Z])")
+
+
+@lru_cache
+def camel_to_snake(string: str) -> str:
+    """Convert camelCase or PascalCase to snake_case."""
+    subbed = _UNDER_SCORE_1.sub(r"\1_\2", string)
+    return _UNDER_SCORE_2.sub(r"\1_\2", subbed).lower()
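
Worth noting from this hunk: `camel_to_snake` splits in two passes, first before capitalized word runs (`HTTPResponse` -> `HTTP_Response`), then at lower-to-upper edges (`getHTTP` -> `get_HTTP`), before lowercasing. A quick check, assuming the 0.45.0 package is installed; the acronym expectations below were worked out by hand from the two regexes:

```python
from datamodel_code_generator.util import camel_to_snake

assert camel_to_snake("camelCase") == "camel_case"
assert camel_to_snake("HTTPResponse") == "http_response"
assert camel_to_snake("getHTTPResponseCode") == "get_http_response_code"
```
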
diff -pruN 0.26.4-3/src/datamodel_code_generator/watch.py 0.45.0-1/src/datamodel_code_generator/watch.py
--- 0.26.4-3/src/datamodel_code_generator/watch.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/src/datamodel_code_generator/watch.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,65 @@
+"""Watch mode for automatic code regeneration."""
+
+from __future__ import annotations
+
+import sys
+from pathlib import Path
+from typing import TYPE_CHECKING, Any
+
+if TYPE_CHECKING:
+    from datamodel_code_generator.__main__ import Config, Exit
+
+
+def _get_watchfiles() -> Any:
+    """Lazily import watchfiles."""
+    try:
+        import watchfiles  # noqa: PLC0415  # pyright: ignore[reportMissingImports]
+    except ImportError as exc:
+        msg = "Please run `pip install 'datamodel-code-generator[watch]'` to use watch mode"
+        raise Exception(msg) from exc  # noqa: TRY002
+    return watchfiles
+
+
+def watch_and_regenerate(
+    config: Config,
+    extra_template_data: dict[str, Any] | None,
+    aliases: dict[str, str] | None,
+    custom_formatters_kwargs: dict[str, str] | None,
+) -> Exit:
+    """Watch input files and regenerate on changes."""
+    from datamodel_code_generator.__main__ import Exit, run_generate_from_config  # noqa: PLC0415
+
+    watchfiles = _get_watchfiles()
+
+    watch_path = Path(config.input) if isinstance(config.input, (str, Path)) else None
+    if watch_path is None:
+        print("Watch mode requires --input file path", file=sys.stderr)  # noqa: T201
+        return Exit.ERROR
+
+    print(f"Watching {watch_path} for changes... (Ctrl+C to stop)")  # noqa: T201
+
+    try:
+        for changes in watchfiles.watch(
+            watch_path,
+            debounce=int(config.watch_delay * 1000),
+            recursive=watch_path.is_dir(),
+        ):
+            print(f"\nDetected changes: {changes}")  # noqa: T201
+            print("Regenerating...")  # noqa: T201
+            try:
+                run_generate_from_config(
+                    config=config,
+                    input_=config.input,  # pyright: ignore[reportArgumentType]
+                    output=config.output,
+                    extra_template_data=extra_template_data,
+                    aliases=aliases,
+                    command_line=None,
+                    custom_formatters_kwargs=custom_formatters_kwargs,
+                )
+                print("Done.")  # noqa: T201
+            except Exception as e:  # noqa: BLE001
+                print(f"Error: {e}", file=sys.stderr)  # noqa: T201
+    except KeyboardInterrupt:
+        print("\nWatch mode stopped.")  # noqa: T201
+
+    return Exit.OK
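
One unit-conversion detail in the loop above: `config.watch_delay` is in seconds, while `watchfiles.watch` takes `debounce` in milliseconds, hence the `* 1000`. A minimal standalone sketch of the same loop, assuming `watchfiles` is installed; `schema_path`, `watch_delay`, and the regeneration comment are placeholders rather than the package's API:

```python
from pathlib import Path

import watchfiles  # optional dependency, guarded by _get_watchfiles() above

schema_path = Path("schema.yaml")  # hypothetical input file
watch_delay = 1.0  # seconds, mirroring config.watch_delay

for changes in watchfiles.watch(
    schema_path,
    debounce=int(watch_delay * 1000),  # watchfiles expects milliseconds
    recursive=schema_path.is_dir(),  # False for a single file
):
    print(f"Detected changes: {changes}")
    # ...regenerate models here, as run_generate_from_config does above
```
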
diff -pruN 0.26.4-3/tests/__init__.py 0.45.0-1/tests/__init__.py
--- 0.26.4-3/tests/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1 @@
+"""Test suite for datamodel-code-generator."""
diff -pruN 0.26.4-3/tests/cli_doc/test_cli_doc_coverage.py 0.45.0-1/tests/cli_doc/test_cli_doc_coverage.py
--- 0.26.4-3/tests/cli_doc/test_cli_doc_coverage.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/cli_doc/test_cli_doc_coverage.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,123 @@
+"""Tests to track CLI documentation coverage.
+
+These tests verify that options intended to be documented have:
+1. A cli_doc marker in tests
+2. An entry in CLI_OPTION_META
+
+The DOCUMENTED_OPTIONS set defines which options should be documented.
+This allows gradual expansion of documentation coverage.
+"""
+
+from __future__ import annotations
+
+import json
+from pathlib import Path
+from typing import Any
+
+import pytest
+
+from datamodel_code_generator.cli_options import (
+    CLI_OPTION_META,
+    MANUAL_DOCS,
+    get_all_canonical_options,
+    get_canonical_option,
+)
+
+COLLECTION_PATH = Path(__file__).parent / ".cli_doc_collection.json"
+
+# Options that should be documented (gradually expand this set)
+# Options in this set MUST have:
+#   1. A cli_doc marker in tests
+#   2. An entry in CLI_OPTION_META
+DOCUMENTED_OPTIONS: frozenset[str] = frozenset({
+    "--frozen-dataclasses",
+    # Add more as cli_doc markers are added to tests...
+})
+
+
+@pytest.fixture(scope="module")
+def collection_data() -> dict[str, Any]:  # pragma: no cover
+    """Load the CLI doc collection data."""
+    if not COLLECTION_PATH.exists():
+        pytest.skip(f"CLI doc collection not found at {COLLECTION_PATH}. Run: pytest --collect-cli-docs -p no:xdist")
+
+    with Path(COLLECTION_PATH).open(encoding="utf-8") as f:
+        return json.load(f)
+
+
+@pytest.fixture(scope="module")
+def collected_options(collection_data: dict[str, Any]) -> set[str]:  # pragma: no cover
+    """Extract canonical options from collection data."""
+    options: set[str] = set()
+    for item in collection_data.get("items", []):
+        options.update(get_canonical_option(opt) for opt in item["marker_kwargs"].get("options", []))
+    return options
+
+
+class TestCLIDocCoverage:  # pragma: no cover
+    """Documentation coverage tests."""
+
+    def test_documented_options_have_cli_doc_markers(  # noqa: PLR6301
+        self, collected_options: set[str]
+    ) -> None:
+        """Verify that DOCUMENTED_OPTIONS have cli_doc markers in tests."""
+        missing = DOCUMENTED_OPTIONS - collected_options
+        if missing:
+            pytest.fail(
+                "Options in DOCUMENTED_OPTIONS but missing cli_doc marker:\n"
+                + "\n".join(f"  - {opt}" for opt in sorted(missing))
+                + "\n\nAdd @pytest.mark.cli_doc(...) to tests for these options."
+            )
+
+    def test_documented_options_have_meta(self) -> None:  # noqa: PLR6301
+        """Verify that DOCUMENTED_OPTIONS have CLI_OPTION_META entries."""
+        missing = DOCUMENTED_OPTIONS - set(CLI_OPTION_META.keys())
+        if missing:
+            pytest.fail(
+                "Options in DOCUMENTED_OPTIONS but missing CLI_OPTION_META:\n"
+                + "\n".join(f"  - {opt}" for opt in sorted(missing))
+                + "\n\nAdd entries to CLI_OPTION_META in cli_options.py."
+            )
+
+    def test_documented_options_not_manual(self) -> None:  # noqa: PLR6301
+        """Verify that DOCUMENTED_OPTIONS are not in MANUAL_DOCS."""
+        overlap = DOCUMENTED_OPTIONS & MANUAL_DOCS
+        if overlap:
+            pytest.fail(
+                "Options in both DOCUMENTED_OPTIONS and MANUAL_DOCS:\n"
+                + "\n".join(f"  - {opt}" for opt in sorted(overlap))
+            )
+
+    def test_collection_schema_version(  # noqa: PLR6301
+        self, collection_data: dict[str, Any]
+    ) -> None:
+        """Verify that collection data has expected schema version."""
+        version = collection_data.get("schema_version")
+        assert version is not None, "Collection data missing 'schema_version'"
+        assert version == 1, f"Unexpected schema version: {version}"
+
+
+class TestCoverageStats:  # pragma: no cover
+    """Informational tests for coverage statistics."""
+
+    @pytest.mark.skip(reason="Informational: run with -v --no-skip to see stats")
+    def test_show_coverage_stats(self, collected_options: set[str]) -> None:  # noqa: PLR6301
+        """Display documentation coverage statistics."""
+        all_options = get_all_canonical_options()
+        documentable = all_options - MANUAL_DOCS
+        undocumented = documentable - collected_options
+
+        print(f"\nUndocumented options ({len(undocumented)}):")  # noqa: T201
+        for opt in sorted(undocumented):
+            print(f"  {opt}")  # noqa: T201
+
+    @pytest.mark.skip(reason="Informational: run with -v --no-skip to see stats")
+    def test_show_documented_options(  # noqa: PLR6301
+        self, collected_options: set[str]
+    ) -> None:
+        """Display currently documented options."""
+        print(f"\nDocumented options ({len(collected_options)}):")  # noqa: T201
+        for opt in sorted(collected_options):
+            meta = CLI_OPTION_META.get(opt)
+            category = meta.category.value if meta else "General Options"
+            print(f"  {opt} ({category})")  # noqa: T201
diff -pruN 0.26.4-3/tests/cli_doc/test_cli_options_sync.py 0.45.0-1/tests/cli_doc/test_cli_options_sync.py
--- 0.26.4-3/tests/cli_doc/test_cli_options_sync.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/cli_doc/test_cli_options_sync.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,115 @@
+"""Tests to ensure CLI_OPTION_META stays in sync with argparse.
+
+These tests verify that:
+1. All options in CLI_OPTION_META exist in argparse
+2. All options in MANUAL_DOCS exist in argparse
+3. There's no overlap between CLI_OPTION_META and MANUAL_DOCS
+"""
+
+from __future__ import annotations
+
+import pytest
+
+from datamodel_code_generator.arguments import arg_parser as argument_parser
+from datamodel_code_generator.cli_options import (
+    CLI_OPTION_META,
+    MANUAL_DOCS,
+    _canonical_option_key,
+    get_all_canonical_options,
+    get_canonical_option,
+)
+
+
+def test_get_canonical_option() -> None:
+    """Test that get_canonical_option normalizes option aliases."""
+    assert get_canonical_option("--help") == "--help"
+    assert get_canonical_option("-h") == "--help"
+    assert get_canonical_option("--input") == "--input"
+    assert get_canonical_option("--unknown-option") == "--unknown-option"
+
+
+class TestCLIOptionMetaSync:  # pragma: no cover
+    """Synchronization tests for CLI_OPTION_META."""
+
+    def test_all_registered_options_exist_in_argparse(self) -> None:  # noqa: PLR6301
+        """Verify that all options in CLI_OPTION_META exist in argparse."""
+        argparse_options = get_all_canonical_options()
+        registered = set(CLI_OPTION_META.keys())
+
+        orphan = registered - argparse_options
+        if orphan:
+            pytest.fail(
+                "Options in CLI_OPTION_META but not in argparse:\n"
+                + "\n".join(f"  - {opt}" for opt in sorted(orphan))
+                + "\n\nRemove them from CLI_OPTION_META or add them to arguments.py."
+            )
+
+    def test_manual_doc_options_exist_in_argparse(self) -> None:  # noqa: PLR6301
+        """Verify that all options in MANUAL_DOCS exist in argparse."""
+        argparse_options = get_all_canonical_options()
+
+        orphan = MANUAL_DOCS - argparse_options
+        if orphan:
+            pytest.fail(
+                "Options in MANUAL_DOCS but not in argparse:\n"
+                + "\n".join(f"  - {opt}" for opt in sorted(orphan))
+                + "\n\nRemove them from MANUAL_DOCS or add them to arguments.py."
+            )
+
+    def test_no_overlap_between_meta_and_manual(self) -> None:  # noqa: PLR6301
+        """Verify that CLI_OPTION_META and MANUAL_DOCS don't overlap."""
+        overlap = set(CLI_OPTION_META.keys()) & MANUAL_DOCS
+        if overlap:
+            pytest.fail(
+                "Options in both CLI_OPTION_META and MANUAL_DOCS:\n"
+                + "\n".join(f"  - {opt}" for opt in sorted(overlap))
+                + "\n\nAn option should be in one or the other, not both."
+            )
+
+    def test_meta_names_match_keys(self) -> None:  # noqa: PLR6301
+        """Verify that CLIOptionMeta.name matches the dict key."""
+        mismatches = []
+        for key, meta in CLI_OPTION_META.items():
+            if key != meta.name:
+                mismatches.append(f"  Key '{key}' != meta.name '{meta.name}'")
+
+        if mismatches:
+            pytest.fail("CLIOptionMeta.name mismatches:\n" + "\n".join(mismatches))
+
+    def test_all_argparse_options_are_documented_or_excluded(self) -> None:  # noqa: PLR6301
+        """Verify that all argparse options are either documented or explicitly excluded.
+
+        This test fails when a new CLI option is added to arguments.py
+        but not added to CLI_OPTION_META or MANUAL_DOCS.
+        """
+        argparse_options = get_all_canonical_options()
+        documented = set(CLI_OPTION_META.keys())
+        manual = MANUAL_DOCS
+        covered = documented | manual
+        missing = argparse_options - covered
+
+        if missing:
+            pytest.fail(
+                "CLI options in argparse but not in CLI_OPTION_META or MANUAL_DOCS:\n"
+                + "\n".join(f"  - {opt}" for opt in sorted(missing))
+                + "\n\nAdd entries to CLI_OPTION_META in cli_options.py, "
+                "or add to MANUAL_DOCS if they should have manual documentation."
+            )
+
+    def test_canonical_option_determination_is_stable(self) -> None:  # noqa: PLR6301
+        """Verify that canonical option determination is deterministic.
+
+        The canonical option should be the longest option string for each action.
+        If multiple options have the same length, the lexicographically last one
+        should be chosen for stability.
+        """
+        for action in argument_parser._actions:
+            if not action.option_strings:
+                continue
+
+            sorted_opts = sorted(action.option_strings, key=_canonical_option_key)
+            canonical = sorted_opts[-1]
+
+            re_sorted = sorted(action.option_strings, key=_canonical_option_key)
+            assert sorted_opts == re_sorted, f"Canonical determination is not stable for {action.option_strings}"
+            assert canonical == re_sorted[-1], f"Canonical mismatch for {action.option_strings}"
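
The stability test above pins down the canonical-option rule: the longest option string wins, with lexicographic order breaking ties. A plausible reconstruction of `_canonical_option_key` consistent with that rule (the actual cli_options.py source is not part of this diff):

```python
def _canonical_option_key(option: str) -> tuple[int, str]:
    # Sort ascending by (length, spelling); the last element is canonical.
    return (len(option), option)


aliases = ["-h", "--help"]
assert sorted(aliases, key=_canonical_option_key)[-1] == "--help"
```
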
diff -pruN 0.26.4-3/tests/conftest.py 0.45.0-1/tests/conftest.py
--- 0.26.4-3/tests/conftest.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/conftest.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,633 @@
+"""Test configuration and shared fixtures."""
+
+from __future__ import annotations
+
+import difflib
+import inspect
+import json
+import re
+import sys
+import time
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, Protocol
+
+import pytest
+from inline_snapshot import external_file, register_format_alias
+
+from datamodel_code_generator import MIN_VERSION
+
+if TYPE_CHECKING:
+    from collections.abc import Callable
+
+CLI_DOC_COLLECTION_OUTPUT = Path(__file__).parent / "cli_doc" / ".cli_doc_collection.json"
+CLI_DOC_SCHEMA_VERSION = 1
+_VERSION_PATTERN = re.compile(r"^\d+\.\d+$")
+
+
+def pytest_addoption(parser: pytest.Parser) -> None:
+    """Add --collect-cli-docs option."""
+    parser.addoption(
+        "--collect-cli-docs",
+        action="store_true",
+        default=False,
+        help="Collect CLI documentation metadata from tests marked with @pytest.mark.cli_doc",
+    )
+
+
+def pytest_configure(config: pytest.Config) -> None:
+    """Register the cli_doc marker."""
+    config.addinivalue_line(
+        "markers",
+        "cli_doc(options, input_schema=None, cli_args=None, golden_output=None, version_outputs=None, "
+        "model_outputs=None, expected_stdout=None, config_content=None, aliases=None, **kwargs): "
+        "Mark test as CLI documentation source. "
+        "Either golden_output, version_outputs, model_outputs, or expected_stdout is required. "
+        "aliases: list of alternative option names (e.g., ['--capitalise-enum-members']).",
+    )
+    config._cli_doc_items: list[dict[str, Any]] = []
+
+
+def _validate_cli_doc_marker(node_id: str, kwargs: dict[str, Any]) -> list[str]:  # noqa: ARG001, PLR0912, PLR0914  # pragma: no cover
+    """Validate marker required fields and types."""
+    errors: list[str] = []
+
+    if "options" not in kwargs:
+        errors.append("Missing required field: 'options'")
+    if "cli_args" not in kwargs:
+        errors.append("Missing required field: 'cli_args'")
+
+    has_golden = "golden_output" in kwargs and kwargs["golden_output"] is not None
+    has_versions = "version_outputs" in kwargs and kwargs["version_outputs"] is not None
+    has_models = "model_outputs" in kwargs and kwargs["model_outputs"] is not None
+    has_stdout = "expected_stdout" in kwargs and kwargs["expected_stdout"] is not None
+    if not has_golden and not has_versions and not has_models and not has_stdout:
+        errors.append("Either 'golden_output', 'version_outputs', 'model_outputs', or 'expected_stdout' is required")
+
+    has_input_schema = "input_schema" in kwargs and kwargs["input_schema"] is not None
+    has_config_content = "config_content" in kwargs and kwargs["config_content"] is not None
+    if not has_input_schema and not has_config_content and not has_stdout:
+        errors.append(
+            "Either 'input_schema' or 'config_content' is required (or 'expected_stdout' with cli_args as input)"
+        )
+
+    if "options" in kwargs:
+        opts = kwargs["options"]
+        if not isinstance(opts, list):
+            errors.append(f"'options' must be a list, got {type(opts).__name__}")
+        elif not opts:
+            errors.append("'options' must be a non-empty list")
+        elif not all(isinstance(o, str) for o in opts):
+            errors.append("'options' must be a list of strings")
+
+    if "cli_args" in kwargs:
+        args = kwargs["cli_args"]
+        if not isinstance(args, list):
+            errors.append(f"'cli_args' must be a list, got {type(args).__name__}")
+        elif not all(isinstance(a, str) for a in args):
+            errors.append("'cli_args' must be a list of strings")
+
+    if "input_schema" in kwargs:
+        schema = kwargs["input_schema"]
+        if not isinstance(schema, str):
+            errors.append(f"'input_schema' must be a string, got {type(schema).__name__}")
+
+    if has_golden:
+        golden = kwargs["golden_output"]
+        if not isinstance(golden, str):
+            errors.append(f"'golden_output' must be a string, got {type(golden).__name__}")
+
+    if has_versions:
+        versions = kwargs["version_outputs"]
+        if not isinstance(versions, dict):
+            errors.append(f"'version_outputs' must be a dict, got {type(versions).__name__}")
+        else:
+            for key, value in versions.items():
+                if not isinstance(key, str):
+                    errors.append(f"'version_outputs' keys must be strings, got {type(key).__name__}")
+                elif not _VERSION_PATTERN.match(key):
+                    errors.append(f"Invalid version key '{key}': must match X.Y format (e.g., '3.10')")
+                if not isinstance(value, str):
+                    errors.append(f"'version_outputs' values must be strings, got {type(value).__name__}")
+
+    if has_models:
+        models = kwargs["model_outputs"]
+        if not isinstance(models, dict):
+            errors.append(f"'model_outputs' must be a dict, got {type(models).__name__}")
+        else:
+            valid_keys = {"pydantic_v1", "pydantic_v2", "dataclass", "typeddict", "msgspec"}
+            for key, value in models.items():
+                if not isinstance(key, str):
+                    errors.append(f"'model_outputs' keys must be strings, got {type(key).__name__}")
+                elif key not in valid_keys:
+                    errors.append(f"Invalid model key '{key}': must be one of {valid_keys}")
+                if not isinstance(value, str):
+                    errors.append(f"'model_outputs' values must be strings, got {type(value).__name__}")
+
+    if "related_options" in kwargs:
+        related = kwargs["related_options"]
+        if not isinstance(related, list):
+            errors.append(f"'related_options' must be a list, got {type(related).__name__}")
+        elif not all(isinstance(r, str) for r in related):
+            errors.append("'related_options' must be a list of strings")
+
+    if "aliases" in kwargs:
+        aliases = kwargs["aliases"]
+        if aliases is not None:
+            if not isinstance(aliases, list):
+                errors.append(f"'aliases' must be a list, got {type(aliases).__name__}")
+            elif not all(isinstance(a, str) for a in aliases):
+                errors.append("'aliases' must be a list of strings")
+
+    return errors
+
+
+def pytest_collection_modifyitems(
+    session: pytest.Session,  # noqa: ARG001
+    config: pytest.Config,
+    items: list[pytest.Item],
+) -> None:  # pragma: no cover
+    """Collect CLI doc metadata from tests with cli_doc marker."""
+    if not config.getoption("--collect-cli-docs"):
+        return
+
+    validation_errors: list[tuple[str, list[str]]] = []
+
+    for item in items:
+        marker = item.get_closest_marker("cli_doc")
+        if marker is None:
+            continue
+
+        errors = _validate_cli_doc_marker(item.nodeid, marker.kwargs)
+        if errors:
+            validation_errors.append((item.nodeid, errors))
+            continue
+
+        docstring = ""
+        func = getattr(item, "function", None)
+        if func is not None:
+            docstring = func.__doc__ or ""
+
+        config._cli_doc_items.append({
+            "node_id": item.nodeid,
+            "marker_kwargs": marker.kwargs,
+            "docstring": docstring,
+        })
+
+    if validation_errors:
+        error_msg = "CLI doc marker validation errors:\n"
+        for node_id, errors in validation_errors:
+            error_msg += f"\n  {node_id}:\n"
+            error_msg += "\n".join(f"    - {e}" for e in errors)
+        pytest.fail(error_msg, pytrace=False)
+
+
+def pytest_runtestloop(session: pytest.Session) -> bool | None:  # pragma: no cover
+    """Skip test execution when --collect-cli-docs is used."""
+    if session.config.getoption("--collect-cli-docs"):
+        return True
+    return None
+
+
+def pytest_sessionfinish(session: pytest.Session, exitstatus: int) -> None:  # noqa: ARG001  # pragma: no cover
+    """Save collected CLI doc metadata to JSON file."""
+    config = session.config
+    if not config.getoption("--collect-cli-docs"):
+        return
+
+    items = getattr(config, "_cli_doc_items", [])
+
+    output = {
+        "schema_version": CLI_DOC_SCHEMA_VERSION,
+        "items": items,
+    }
+
+    CLI_DOC_COLLECTION_OUTPUT.parent.mkdir(parents=True, exist_ok=True)
+    with Path(CLI_DOC_COLLECTION_OUTPUT).open("w", encoding="utf-8") as f:
+        json.dump(output, f, indent=2, ensure_ascii=False)
+
+
+class CodeValidationStats:
+    """Track code validation statistics."""
+
+    def __init__(self) -> None:
+        """Initialize statistics counters."""
+        self.compile_count = 0
+        self.compile_time = 0.0
+        self.exec_count = 0
+        self.exec_time = 0.0
+        self.errors: list[tuple[str, str]] = []
+
+    def record_compile(self, elapsed: float) -> None:
+        """Record a compile operation."""
+        self.compile_count += 1
+        self.compile_time += elapsed
+
+    def record_exec(self, elapsed: float) -> None:
+        """Record an exec operation."""
+        self.exec_count += 1
+        self.exec_time += elapsed
+
+    def record_error(self, file_path: str, error: str) -> None:  # pragma: no cover
+        """Record a validation error."""
+        self.errors.append((file_path, error))
+
+
+_validation_stats = CodeValidationStats()
+
+
+def pytest_terminal_summary(terminalreporter: Any, exitstatus: int, config: pytest.Config) -> None:  # noqa: ARG001  # pragma: no cover
+    """Print code validation and CLI doc collection summary at the end of test run."""
+    if config.getoption("--collect-cli-docs", default=False):
+        items = getattr(config, "_cli_doc_items", [])
+        terminalreporter.write_sep("=", "CLI Documentation Collection")
+        terminalreporter.write_line(f"Collected {len(items)} CLI doc items -> {CLI_DOC_COLLECTION_OUTPUT}")
+
+    if _validation_stats.compile_count > 0:
+        terminalreporter.write_sep("=", "Code Validation Summary")
+        terminalreporter.write_line(
+            f"Compiled {_validation_stats.compile_count} files in {_validation_stats.compile_time:.3f}s "
+            f"(avg: {_validation_stats.compile_time / _validation_stats.compile_count * 1000:.2f}ms)"
+        )
+        if _validation_stats.exec_count > 0:
+            terminalreporter.write_line(
+                f"Executed {_validation_stats.exec_count} files in {_validation_stats.exec_time:.3f}s "
+                f"(avg: {_validation_stats.exec_time / _validation_stats.exec_count * 1000:.2f}ms)"
+            )
+        if _validation_stats.errors:
+            terminalreporter.write_line(f"\nValidation errors: {len(_validation_stats.errors)}")
+            for file_path, error in _validation_stats.errors:
+                terminalreporter.write_line(f"  {file_path}: {error}")
+
+
+if sys.version_info >= (3, 10):
+    from datetime import datetime, timezone
+
+    import time_machine
+
+    def _parse_time_string(time_str: str) -> datetime:
+        """Parse time string to datetime with UTC timezone."""
+        for fmt in (
+            "%Y-%m-%dT%H:%M:%S%z",
+            "%Y-%m-%d %H:%M:%S%z",
+            "%Y-%m-%dT%H:%M:%S",
+            "%Y-%m-%d %H:%M:%S",
+            "%Y-%m-%d",
+        ):
+            try:
+                dt = datetime.strptime(time_str, fmt)  # noqa: DTZ007
+                if dt.tzinfo is None:
+                    dt = dt.replace(tzinfo=timezone.utc)
+                return dt  # noqa: TRY300
+            except ValueError:  # noqa: PERF203
+                continue
+        return datetime.fromisoformat(time_str.replace("Z", "+00:00"))  # pragma: no cover
+
+    def freeze_time(time_to_freeze: str, **kwargs: Any) -> time_machine.travel:  # noqa: ARG001
+        """Freeze time using time-machine (100-200x faster than freezegun)."""
+        dt = _parse_time_string(time_to_freeze)
+        return time_machine.travel(dt, tick=False)
+
+else:
+    from freezegun import freeze_time as freeze_time  # noqa: PLC0414
+
+
+def _normalize_line_endings(text: str) -> str:
+    """Normalize line endings to LF for cross-platform comparison."""
+    return text.replace("\r\n", "\n")
+
+
+def _get_tox_env() -> str:  # pragma: no cover
+    """Get the current tox environment name from TOX_ENV_NAME or fallback.
+
+    Strips '-parallel' suffix since inline-snapshot requires -n0 (single process).
+    """
+    import os
+
+    env = os.environ.get("TOX_ENV_NAME", "<version>")
+    # Remove -parallel suffix since inline-snapshot needs single process mode
+    return env.removesuffix("-parallel")
+
+
+def _format_snapshot_hint(action: str) -> str:  # pragma: no cover
+    """Format a hint message for inline-snapshot commands with rich formatting."""
+    from io import StringIO
+
+    from rich.console import Console
+    from rich.text import Text
+
+    tox_env = _get_tox_env()
+    command = f"  tox run -e {tox_env} -- --inline-snapshot={action}"
+
+    description = "To update the expected file, run:" if action == "fix" else "To create the expected file, run:"
+
+    output = StringIO()
+    console = Console(file=output, force_terminal=True, width=200, soft_wrap=False)
+
+    console.print(Text(description, style="default"))
+    console.print(Text(command, style="bold cyan"))
+
+    return output.getvalue()
+
+
+def _format_new_content(content: str) -> str:  # pragma: no cover
+    """Format new content (for create mode) with green color."""
+    from io import StringIO
+
+    from rich.console import Console
+    from rich.text import Text
+
+    output = StringIO()
+    console = Console(file=output, force_terminal=True, width=200, soft_wrap=False)
+
+    for line in content.splitlines():
+        console.print(Text(f"+{line}", style="green"))
+
+    return output.getvalue()
+
+
+def _format_diff(expected: str, actual: str, expected_path: Path) -> str:  # pragma: no cover
+    """Format a unified diff between expected and actual content with colors."""
+    from io import StringIO
+
+    from rich.console import Console
+    from rich.text import Text
+
+    expected_lines = expected.splitlines(keepends=True)
+    actual_lines = actual.splitlines(keepends=True)
+    diff_lines = list(
+        difflib.unified_diff(
+            expected_lines,
+            actual_lines,
+            fromfile=str(expected_path),
+            tofile="actual",
+        )
+    )
+
+    if not diff_lines:
+        return ""
+
+    output = StringIO()
+    console = Console(file=output, force_terminal=True, width=200, soft_wrap=False)
+
+    for line in diff_lines:
+        line_stripped = line.rstrip("\n")
+        # Skip header lines since file path is already in the error message
+        if line.startswith(("---", "+++")):
+            continue
+        if line.startswith("@@"):
+            console.print(Text(line_stripped, style="cyan"))
+        elif line.startswith("-"):
+            console.print(Text(line_stripped, style="red"))
+        elif line.startswith("+"):
+            console.print(Text(line_stripped, style="green"))
+        else:
+            # Use default to override pytest's red color for E lines
+            console.print(Text(line_stripped, style="default"))
+
+    return output.getvalue()
+
+
+def _assert_with_external_file(content: str, expected_path: Path) -> None:
+    """Assert content matches external file, handling line endings."""
+    __tracebackhide__ = True
+    try:
+        expected = external_file(expected_path)
+    except FileNotFoundError:  # pragma: no cover
+        hint = _format_snapshot_hint("create")
+        formatted_content = _format_new_content(content)
+        msg = f"Expected file not found: {expected_path}\n{hint}\n{formatted_content}"
+        raise AssertionError(msg) from None  # pragma: no cover
+    normalized_content = _normalize_line_endings(content)
+    if isinstance(expected, str):  # pragma: no branch
+        normalized_expected = _normalize_line_endings(expected)
+        if normalized_content != normalized_expected:  # pragma: no cover
+            hint = _format_snapshot_hint("fix")
+            diff = _format_diff(normalized_expected, normalized_content, expected_path)
+            msg = f"Content mismatch for {expected_path}\n{hint}\n{diff}"
+            raise AssertionError(msg) from None
+    else:
+        assert expected == normalized_content  # pragma: no cover
+
+
+class AssertFileContent(Protocol):
+    """Protocol for file content assertion callable."""
+
+    def __call__(
+        self,
+        output_file: Path,
+        expected_name: str | Path | None = None,
+        encoding: str = "utf-8",
+        transform: Callable[[str], str] | None = None,
+    ) -> None:
+        """Assert file content matches expected output."""
+        ...
+
+
+def create_assert_file_content(
+    base_path: Path,
+) -> AssertFileContent:
+    """Create an assert function bound to a specific expected path.
+
+    Args:
+        base_path: The base path for expected files (e.g., EXPECTED_JSON_SCHEMA_PATH).
+
+    Returns:
+        A function that asserts file content matches expected.
+
+    Usage:
+        # In test module
+        assert_file_content = create_assert_file_content(EXPECTED_JSON_SCHEMA_PATH)
+
+        # In tests - infer from function name
+        assert_file_content(output_file)  # test_main_foo -> foo.py
+
+        # Explicit filename
+        assert_file_content(output_file, "custom.py")
+        assert_file_content(output_file, "subdir/bar.py")
+        assert_file_content(output_file, f"{expected_output}/file.py")
+    """
+
+    def _assert_file_content(
+        output_file: Path,
+        expected_name: str | Path | None = None,
+        encoding: str = "utf-8",
+        transform: Callable[[str], str] | None = None,
+    ) -> None:
+        """Assert that file content matches expected external file."""
+        __tracebackhide__ = True
+        if expected_name is None:
+            frame = inspect.currentframe()
+            assert frame is not None
+            assert frame.f_back is not None
+            func_name = frame.f_back.f_code.co_name
+            del frame
+            name = func_name
+            for prefix in ("test_main_", "test_"):
+                if name.startswith(prefix):
+                    name = name[len(prefix) :]
+                    break
+            expected_name = f"{name}.py"
+
+        expected_path = base_path / expected_name
+        content = output_file.read_text(encoding=encoding)
+        if transform is not None:
+            content = transform(content)
+        _assert_with_external_file(content, expected_path)
+
+    return _assert_file_content
+
+
+def assert_output(
+    output: str,
+    expected_path: Path,
+) -> None:
+    """Assert that output string matches expected external file.
+
+    Args:
+        output: The output string to compare (e.g., captured.out, parser.parse()).
+        expected_path: Path to the expected file.
+
+    Usage:
+        assert_output(captured.out, EXPECTED_PATH / "output.py")
+        assert_output(parser.parse(), EXPECTED_PATH / "output.py")
+    """
+    __tracebackhide__ = True
+    _assert_with_external_file(output, expected_path)
+
+
+def assert_directory_content(
+    output_dir: Path,
+    expected_dir: Path,
+    pattern: str = "*.py",
+    encoding: str = "utf-8",
+) -> None:
+    """Assert all files in output_dir match expected files in expected_dir.
+
+    Args:
+        output_dir: Directory containing generated output files.
+        expected_dir: Directory containing expected files.
+        pattern: Glob pattern for files to compare (default: "*.py").
+        encoding: File encoding (default: "utf-8").
+
+    Usage:
+        assert_directory_content(tmp_path / "model", EXPECTED_PATH / "main_modular")
+    """
+    __tracebackhide__ = True
+    output_files = {p.relative_to(output_dir) for p in output_dir.rglob(pattern)}
+    expected_files = {p.relative_to(expected_dir) for p in expected_dir.rglob(pattern)}
+
+    # Check for extra expected files (output missing files that are expected)
+    extra = expected_files - output_files
+    assert not extra, f"Expected files not in output: {extra}"
+
+    # Compare all output files (including new ones not yet in expected)
+    for output_path in output_dir.rglob(pattern):
+        relative_path = output_path.relative_to(output_dir)
+        expected_path = expected_dir / relative_path
+        result = output_path.read_text(encoding=encoding)
+        _assert_with_external_file(result, expected_path)
+
+
+def _get_full_body(result: object) -> str:
+    """Get full body from Result."""
+    return getattr(result, "body", "")
+
+
+def assert_parser_results(
+    results: dict,
+    expected_dir: Path,
+    pattern: str = "*.py",
+) -> None:
+    """Assert parser results match expected files.
+
+    Args:
+        results: Dictionary with string keys mapping to objects with .body attribute.
+        expected_dir: Directory containing expected files.
+        pattern: Glob pattern for files to compare (default: "*.py").
+
+    Usage:
+        results = {delimiter.join(p): r for p, r in parser.parse().items()}
+        assert_parser_results(results, EXPECTED_PATH / "parser_output")
+    """
+    __tracebackhide__ = True
+    for expected_path in expected_dir.rglob(pattern):
+        key = str(expected_path.relative_to(expected_dir))
+        result_obj = results.pop(key)
+        _assert_with_external_file(_get_full_body(result_obj), expected_path)
+
+
+def assert_parser_modules(
+    modules: dict,
+    expected_dir: Path,
+) -> None:
+    """Assert parser modules match expected files.
+
+    Args:
+        modules: Dictionary with tuple keys mapping to objects with .body attribute.
+        expected_dir: Directory containing expected files.
+
+    Usage:
+        modules = parser.parse()
+        assert_parser_modules(modules, EXPECTED_PATH / "parser_modular")
+    """
+    __tracebackhide__ = True
+    for paths, result in modules.items():
+        expected_path = expected_dir.joinpath(*paths)
+        _assert_with_external_file(_get_full_body(result), expected_path)
+
+
+@pytest.fixture(autouse=True)
+def _inline_snapshot_file_formats() -> None:
+    register_format_alias(".py", ".txt")
+    register_format_alias(".pyi", ".txt")
+    register_format_alias(".snapshot", ".txt")
+
+
+@pytest.fixture(scope="session")
+def min_version() -> str:
+    """Return minimum Python version as string."""
+    return f"3.{MIN_VERSION}"
+
+
+@pytest.fixture(scope="session", autouse=True)
+def _preload_heavy_modules() -> None:
+    """Pre-import heavy modules once per session to warm up the import cache.
+
+    This reduces per-test overhead when running with pytest-xdist,
+    as each worker only pays the import cost once at session start.
+    """
+    import black  # noqa: F401
+    import inflect  # noqa: F401
+    import isort  # noqa: F401
+
+    import datamodel_code_generator  # noqa: F401
+
+
+def validate_generated_code(
+    code: str,
+    file_path: str,
+    *,
+    do_exec: bool = False,
+) -> None:
+    """Validate generated code by compiling and optionally executing it.
+
+    Args:
+        code: The generated Python code to validate.
+        file_path: Path to the file (for error reporting).
+        do_exec: Whether to execute the code after compiling (default: False).
+    """
+    try:
+        start = time.perf_counter()
+        compiled = compile(code, file_path, "exec")
+        _validation_stats.record_compile(time.perf_counter() - start)
+
+        if do_exec:
+            start = time.perf_counter()
+            exec(compiled, {})
+            _validation_stats.record_exec(time.perf_counter() - start)
+    except SyntaxError as e:  # pragma: no cover
+        _validation_stats.record_error(file_path, f"SyntaxError: {e}")
+        raise
+    except Exception as e:  # pragma: no cover
+        _validation_stats.record_error(file_path, f"{type(e).__name__}: {e}")
+        raise
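
A usage sketch for `validate_generated_code`, assuming the repository's `tests` package is importable (tests/__init__.py above makes it one); the snippet being validated is illustrative:

```python
from tests.conftest import validate_generated_code

code = (
    "from dataclasses import dataclass\n"
    "\n"
    "@dataclass(frozen=True)\n"
    "class User:\n"
    "    name: str\n"
)

# Compiles only by default; do_exec=True also executes the module body.
# Both timings feed the terminal summary printed by pytest_terminal_summary.
validate_generated_code(code, "generated/user.py", do_exec=True)
```
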
diff -pruN 0.26.4-3/tests/data/aliases/hierarchical_aliases_scoped.json 0.45.0-1/tests/data/aliases/hierarchical_aliases_scoped.json
--- 0.26.4-3/tests/data/aliases/hierarchical_aliases_scoped.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/aliases/hierarchical_aliases_scoped.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,5 @@
+{
+  "Root.name": "root_name",
+  "User.name": "user_name",
+  "Address.name": "address_name"
+}
diff -pruN 0.26.4-3/tests/data/config/formatter_kwargs.json 0.45.0-1/tests/data/config/formatter_kwargs.json
--- 0.26.4-3/tests/data/config/formatter_kwargs.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/config/formatter_kwargs.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1 @@
+{}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/csv/simple.csv 0.45.0-1/tests/data/csv/simple.csv
--- 0.26.4-3/tests/data/csv/simple.csv	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/csv/simple.csv	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,4 @@
+id,name,tel,zip code
+1,taro,0123456789,98765
+2,ken,234567891,98764
+3,ichiro,345678912,98763
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/custom_file_header.txt 0.45.0-1/tests/data/custom_file_header.txt
--- 0.26.4-3/tests/data/custom_file_header.txt	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/custom_file_header.txt	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# multiline custom ;
+# header ;
+# file ;
diff -pruN 0.26.4-3/tests/data/custom_file_header_comments_only.txt 0.45.0-1/tests/data/custom_file_header_comments_only.txt
--- 0.26.4-3/tests/data/custom_file_header_comments_only.txt	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/custom_file_header_comments_only.txt	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,2 @@
+# Just a comment
+# Another comment
diff -pruN 0.26.4-3/tests/data/custom_file_header_docstring.txt 0.45.0-1/tests/data/custom_file_header_docstring.txt
--- 0.26.4-3/tests/data/custom_file_header_docstring.txt	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/custom_file_header_docstring.txt	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,5 @@
+"""
+Custom module docstring header.
+
+This is a multi-line docstring used for testing.
+"""
diff -pruN 0.26.4-3/tests/data/custom_file_header_invalid_syntax.txt 0.45.0-1/tests/data/custom_file_header_invalid_syntax.txt
--- 0.26.4-3/tests/data/custom_file_header_invalid_syntax.txt	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/custom_file_header_invalid_syntax.txt	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,2 @@
+# Valid comment
+def incomplete(
diff -pruN 0.26.4-3/tests/data/custom_file_header_with_docstring.txt 0.45.0-1/tests/data/custom_file_header_with_docstring.txt
--- 0.26.4-3/tests/data/custom_file_header_with_docstring.txt	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/custom_file_header_with_docstring.txt	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,5 @@
+"""Custom module docstring.
+
+This module contains generated models.
+"""
+
diff -pruN 0.26.4-3/tests/data/custom_file_header_with_docstring_and_import.txt 0.45.0-1/tests/data/custom_file_header_with_docstring_and_import.txt
--- 0.26.4-3/tests/data/custom_file_header_with_docstring_and_import.txt	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/custom_file_header_with_docstring_and_import.txt	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,8 @@
+"""Custom module docstring.
+
+This module contains generated models.
+"""
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from collections.abc import Sequence
diff -pruN 0.26.4-3/tests/data/custom_file_header_with_import.txt 0.45.0-1/tests/data/custom_file_header_with_import.txt
--- 0.26.4-3/tests/data/custom_file_header_with_import.txt	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/custom_file_header_with_import.txt	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,5 @@
+# Custom header with import
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from collections.abc import Sequence
diff -pruN 0.26.4-3/tests/data/expected/main/csv/csv_file_simple.py 0.45.0-1/tests/data/expected/main/csv/csv_file_simple.py
--- 0.26.4-3/tests/data/expected/main/csv/csv_file_simple.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/csv/csv_file_simple.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  simple.csv
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class Model(BaseModel):
+    id: str
+    name: str
+    tel: str
+    zip_code: str = Field(..., alias='zip code')
diff -pruN 0.26.4-3/tests/data/expected/main/csv/csv_stdin_simple.py 0.45.0-1/tests/data/expected/main/csv/csv_stdin_simple.py
--- 0.26.4-3/tests/data/expected/main/csv/csv_stdin_simple.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/csv/csv_stdin_simple.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  <stdin>
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class Model(BaseModel):
+    id: str
+    name: str
+    tel: str
+    zip_code: str = Field(..., alias='zip code')
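
The two expected modules above differ only in the recorded filename (`simple.csv` versus `<stdin>`). Plausible invocations, assuming the CSV input type is selected explicitly; the timestamps in the headers come from the frozen test clock:

```sh
datamodel-codegen --input simple.csv --input-file-type csv --output model.py
cat simple.csv | datamodel-codegen --input-file-type csv --output model.py
```
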
diff -pruN 0.26.4-3/tests/data/expected/main/direct_input_dict.py 0.45.0-1/tests/data/expected/main/direct_input_dict.py
--- 0.26.4-3/tests/data/expected/main/direct_input_dict.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/direct_input_dict.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  <dict>
+#   timestamp: 2024-12-14T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class Bar(BaseModel):
+    baz: int
+
+
+class Model(BaseModel):
+    foo: int
+    bar: Bar
diff -pruN 0.26.4-3/tests/data/expected/main/frozen_dataclasses.py 0.45.0-1/tests/data/expected/main/frozen_dataclasses.py
--- 0.26.4-3/tests/data/expected/main/frozen_dataclasses.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/frozen_dataclasses.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  simple_frozen_test.json
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Optional
+
+
+@dataclass(frozen=True)
+class User:
+    name: str
+    age: int
+    email: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/frozen_dataclasses_keyword_only.py 0.45.0-1/tests/data/expected/main/frozen_dataclasses_keyword_only.py
--- 0.26.4-3/tests/data/expected/main/frozen_dataclasses_keyword_only.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/frozen_dataclasses_keyword_only.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  simple_frozen_test.json
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Optional
+
+
+@dataclass(frozen=True, kw_only=True)
+class User:
+    name: str
+    age: int
+    email: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/generate_cli_command/false_boolean.txt 0.45.0-1/tests/data/expected/main/generate_cli_command/false_boolean.txt
--- 0.26.4-3/tests/data/expected/main/generate_cli_command/false_boolean.txt	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/generate_cli_command/false_boolean.txt	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,2 @@
+datamodel-codegen --input schema.yaml
+
diff -pruN 0.26.4-3/tests/data/expected/main/generate_cli_command/list_option.txt 0.45.0-1/tests/data/expected/main/generate_cli_command/list_option.txt
--- 0.26.4-3/tests/data/expected/main/generate_cli_command/list_option.txt	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/generate_cli_command/list_option.txt	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,2 @@
+datamodel-codegen --input schema.yaml --strict-types str int
+
diff -pruN 0.26.4-3/tests/data/expected/main/generate_cli_command/no_use_specialized_enum.txt 0.45.0-1/tests/data/expected/main/generate_cli_command/no_use_specialized_enum.txt
--- 0.26.4-3/tests/data/expected/main/generate_cli_command/no_use_specialized_enum.txt	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/generate_cli_command/no_use_specialized_enum.txt	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,2 @@
+datamodel-codegen --input schema.yaml --no-use-specialized-enum
+
diff -pruN 0.26.4-3/tests/data/expected/main/generate_cli_command/true_boolean.txt 0.45.0-1/tests/data/expected/main/generate_cli_command/true_boolean.txt
--- 0.26.4-3/tests/data/expected/main/generate_cli_command/true_boolean.txt	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/generate_cli_command/true_boolean.txt	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,2 @@
+datamodel-codegen --input schema.yaml --snake-case-field
+
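
These four fixtures pin down how a parsed configuration is rendered back into a CLI string: true booleans emit a bare flag, false booleans are dropped entirely, and list options repeat their values after a single flag. A minimal sketch of that rendering rule (illustrative only, not the project's implementation):

    def render_args(options: dict) -> str:
        parts = ["datamodel-codegen"]
        for flag, value in options.items():
            if value is False:  # false booleans: omitted
                continue
            parts.append(flag)
            if value is not True:  # true booleans: bare flag, no value
                values = value if isinstance(value, list) else [value]
                parts.extend(str(v) for v in values)
        return " ".join(parts)

    render_args({"--input": "schema.yaml", "--strict-types": ["str", "int"], "--snake-case-field": True})
    # -> 'datamodel-codegen --input schema.yaml --strict-types str int --snake-case-field'
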
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/additional_imports.py 0.45.0-1/tests/data/expected/main/graphql/additional_imports.py
--- 0.26.4-3/tests/data/expected/main/graphql/additional_imports.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/graphql/additional_imports.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,42 @@
+# generated by datamodel-codegen:
+#   filename:  additional-imports.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from datetime import date, datetime
+from typing import Literal, Optional
+
+from mymodule.myclass import MyCustomPythonClass
+from pydantic import BaseModel, Field
+from typing_extensions import TypeAlias
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+Date: TypeAlias = date
+
+
+DateTime: TypeAlias = datetime
+"""
+DateTime (ISO8601, example: 2020-01-01T10:11:12+00:00)
+"""
+
+
+MyCustomClass: TypeAlias = MyCustomPythonClass
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class A(BaseModel):
+    a: Date
+    b: DateTime
+    c: MyCustomClass
+    typename__: Optional[Literal['A']] = Field('A', alias='__typename')
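
Here a GraphQL custom scalar is bound to an existing Python class instead of a builtin, which is why the generator emits `from mymodule.myclass import MyCustomPythonClass` alongside the usual scalar aliases. The extra import is user-supplied, presumably via `--additional-imports` (module path taken from the fixture itself):

    datamodel-codegen --input additional-imports.graphql --input-file-type graphql --additional-imports mymodule.myclass.MyCustomPythonClass
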
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/annotated.py 0.45.0-1/tests/data/expected/main/graphql/annotated.py
--- 0.26.4-3/tests/data/expected/main/graphql/annotated.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/graphql/annotated.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  annotated.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, List, Literal, Optional
+
+from pydantic import BaseModel, Field
+from typing_extensions import TypeAliasType
+
+Boolean = TypeAliasType("Boolean", bool)
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String = TypeAliasType("String", str)
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class A(BaseModel):
+    field: String
+    listField: List[String]
+    listListField: List[List[String]]
+    listOptionalField: List[Optional[String]]
+    optionalField: Optional[String] = None
+    optionalListField: Optional[List[String]] = None
+    optionalListOptionalField: Optional[List[Optional[String]]] = None
+    typename__: Annotated[Optional[Literal['A']], Field(alias='__typename')] = 'A'
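
In this variant the `Field(...)` metadata moves into `typing.Annotated` and the scalar aliases switch from `TypeAlias` assignments to `TypeAliasType(...)` calls; `--use-annotated` selects the Annotated style:

    datamodel-codegen --input annotated.graphql --input-file-type graphql --use-annotated
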
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/annotated_field_aliases.py 0.45.0-1/tests/data/expected/main/graphql/annotated_field_aliases.py
--- 0.26.4-3/tests/data/expected/main/graphql/annotated_field_aliases.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/graphql/annotated_field_aliases.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  field-aliases.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, Literal, Optional
+
+from pydantic import BaseModel, Field
+from typing_extensions import TypeAliasType
+
+Boolean = TypeAliasType("Boolean", bool)
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+DateTime = TypeAliasType("DateTime", str)
+
+
+String = TypeAliasType("String", str)
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class DateTimePeriod(BaseModel):
+    periodFrom: Annotated[DateTime, Field(alias='from')]
+    periodTo: Annotated[DateTime, Field(alias='to')]
+    typename__: Annotated[
+        Optional[Literal['DateTimePeriod']], Field(alias='__typename')
+    ] = 'DateTimePeriod'
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/annotated_use_default_kwarg.py 0.45.0-1/tests/data/expected/main/graphql/annotated_use_default_kwarg.py
--- 0.26.4-3/tests/data/expected/main/graphql/annotated_use_default_kwarg.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/graphql/annotated_use_default_kwarg.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  annotated.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Literal, Optional
+
+from pydantic import BaseModel, Field
+from typing_extensions import TypeAlias
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class A(BaseModel):
+    field: String
+    listField: List[String]
+    listListField: List[List[String]]
+    listOptionalField: List[Optional[String]]
+    optionalField: Optional[String] = None
+    optionalListField: Optional[List[String]] = None
+    optionalListOptionalField: Optional[List[Optional[String]]] = None
+    typename__: Optional[Literal['A']] = Field(default='A', alias='__typename')
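
Same schema, but defaults are spelled `Field(default='A', ...)` rather than positionally, which matches the `--use-default-kwarg` option:

    datamodel-codegen --input annotated.graphql --input-file-type graphql --use-default-kwarg
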
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/annotated_use_standard_collections.py 0.45.0-1/tests/data/expected/main/graphql/annotated_use_standard_collections.py
--- 0.26.4-3/tests/data/expected/main/graphql/annotated_use_standard_collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/graphql/annotated_use_standard_collections.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  annotated.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, Literal, Optional
+
+from pydantic import BaseModel, Field
+from typing_extensions import TypeAliasType
+
+Boolean = TypeAliasType("Boolean", bool)
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String = TypeAliasType("String", str)
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class A(BaseModel):
+    field: String
+    listField: list[String]
+    listListField: list[list[String]]
+    listOptionalField: list[Optional[String]]
+    optionalField: Optional[String] = None
+    optionalListField: Optional[list[String]] = None
+    optionalListOptionalField: Optional[list[Optional[String]]] = None
+    typename__: Annotated[Optional[Literal['A']], Field(alias='__typename')] = 'A'
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/annotated_use_standard_collections_use_union_operator.py 0.45.0-1/tests/data/expected/main/graphql/annotated_use_standard_collections_use_union_operator.py
--- 0.26.4-3/tests/data/expected/main/graphql/annotated_use_standard_collections_use_union_operator.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/graphql/annotated_use_standard_collections_use_union_operator.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  annotated.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, Literal
+
+from pydantic import BaseModel, Field
+from typing_extensions import TypeAliasType
+
+Boolean = TypeAliasType("Boolean", bool)
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String = TypeAliasType("String", str)
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class A(BaseModel):
+    field: String
+    listField: list[String]
+    listListField: list[list[String]]
+    listOptionalField: list[String | None]
+    optionalField: String | None = None
+    optionalListField: list[String] | None = None
+    optionalListOptionalField: list[String | None] | None = None
+    typename__: Annotated[Literal['A'] | None, Field(alias='__typename')] = 'A'
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/annotated_use_union_operator.py 0.45.0-1/tests/data/expected/main/graphql/annotated_use_union_operator.py
--- 0.26.4-3/tests/data/expected/main/graphql/annotated_use_union_operator.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/graphql/annotated_use_union_operator.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  annotated.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, List, Literal
+
+from pydantic import BaseModel, Field
+from typing_extensions import TypeAliasType
+
+Boolean = TypeAliasType("Boolean", bool)
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String = TypeAliasType("String", str)
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class A(BaseModel):
+    field: String
+    listField: List[String]
+    listListField: List[List[String]]
+    listOptionalField: List[String | None]
+    optionalField: String | None = None
+    optionalListField: List[String] | None = None
+    optionalListOptionalField: List[String | None] | None = None
+    typename__: Annotated[Literal['A'] | None, Field(alias='__typename')] = 'A'
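
The `annotated_use_*` fixtures above differ only in typing dialect: `--use-standard-collections` swaps `typing.List` for builtin `list`, `--use-union-operator` swaps `Optional[X]` for `X | None`, and the combined variant applies both. For example:

    datamodel-codegen --input annotated.graphql --input-file-type graphql --use-annotated --use-standard-collections --use-union-operator
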
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/casing.py 0.45.0-1/tests/data/expected/main/graphql/casing.py
--- 0.26.4-3/tests/data/expected/main/graphql/casing.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/graphql/casing.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,59 @@
+# generated by datamodel-codegen:
+#   filename:  casing.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Literal, Optional
+
+from pydantic import BaseModel, Field
+from typing_extensions import TypeAlias
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+Int: TypeAlias = int
+"""
+The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class Lowercase(Enum):
+    foo = 'foo'
+
+
+class Conflict(BaseModel):
+    Bar: Int
+    bar: String
+    typename__: Optional[Literal['Conflict']] = Field('Conflict', alias='__typename')
+
+
+class ConflictModel(BaseModel):
+    Foo: Int
+    foo: String
+    typename__: Optional[Literal['conflict']] = Field('conflict', alias='__typename')
+
+
+class Lowercasetype(BaseModel):
+    foo: Int
+    typename__: Optional[Literal['lowercasetype']] = Field(
+        'lowercasetype', alias='__typename'
+    )
+
+
+class Ref(BaseModel):
+    bar: Lowercase
+    baz: Lowercasetype
+    eggs: Conflict
+    spam: ConflictModel
+    typename__: Optional[Literal['Ref']] = Field('Ref', alias='__typename')
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/custom_formatters.py 0.45.0-1/tests/data/expected/main/graphql/custom_formatters.py
--- 0.26.4-3/tests/data/expected/main/graphql/custom_formatters.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/graphql/custom_formatters.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,37 @@
+# generated by datamodel-codegen:
+#   filename:  custom-scalar-types.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+# a comment
+from __future__ import annotations
+
+from typing import Literal, Optional
+
+from pydantic import BaseModel, Field
+from typing_extensions import TypeAlias
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+ID: TypeAlias = str
+"""
+The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as `"4"`) or integer (such as `4`) input value will be accepted as an ID.
+"""
+
+
+Long: TypeAlias = str
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class A(BaseModel):
+    duration: Long
+    id: ID
+    typename__: Optional[Literal['A']] = Field('A', alias='__typename')
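
The stray `# a comment` above `from __future__ import annotations` is injected by a user-defined formatter run over the generated source, and the `Long` scalar is remapped to `str` instead of the default. A hypothetical invocation shape, assuming a formatter module named `my_formatter`:

    datamodel-codegen --input custom-scalar-types.graphql --input-file-type graphql --custom-formatters my_formatter
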
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/custom_scalar_types.py 0.45.0-1/tests/data/expected/main/graphql/custom_scalar_types.py
--- 0.26.4-3/tests/data/expected/main/graphql/custom_scalar_types.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/graphql/custom_scalar_types.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,36 @@
+# generated by datamodel-codegen:
+#   filename:  custom-scalar-types.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, Optional
+
+from pydantic import BaseModel, Field
+from typing_extensions import TypeAlias
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+ID: TypeAlias = str
+"""
+The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as `"4"`) or integer (such as `4`) input value will be accepted as an ID.
+"""
+
+
+Long: TypeAlias = int
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class A(BaseModel):
+    duration: Long
+    id: ID
+    typename__: Optional[Literal['A']] = Field('A', alias='__typename')
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/different_types_of_fields.py 0.45.0-1/tests/data/expected/main/graphql/different_types_of_fields.py
--- 0.26.4-3/tests/data/expected/main/graphql/different_types_of_fields.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/graphql/different_types_of_fields.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,40 @@
+# generated by datamodel-codegen:
+#   filename:  different-types-of-fields.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Literal, Optional
+
+from pydantic import BaseModel, Field
+from typing_extensions import TypeAlias
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class A(BaseModel):
+    field: String
+    listField: List[String]
+    listListField: List[List[String]]
+    listListOptionalField: List[List[Optional[String]]]
+    listOptionalField: List[Optional[String]]
+    listOptionalListField: List[Optional[List[String]]]
+    listOptionalListOptionalField: List[Optional[List[Optional[String]]]]
+    optionalField: Optional[String] = None
+    optionalListListField: Optional[List[List[String]]] = None
+    optionalListListOptionalField: Optional[List[List[Optional[String]]]] = None
+    optionalListOptionalField: Optional[List[Optional[String]]] = None
+    optionalListOptionalListField: Optional[List[Optional[List[String]]]] = None
+    optionalListOptionalListOptionalField: Optional[
+        List[Optional[List[Optional[String]]]]
+    ] = None
+    typename__: Optional[Literal['A']] = Field('A', alias='__typename')
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/enum_literals_all.py 0.45.0-1/tests/data/expected/main/graphql/enum_literals_all.py
--- 0.26.4-3/tests/data/expected/main/graphql/enum_literals_all.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/graphql/enum_literals_all.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,37 @@
+# generated by datamodel-codegen:
+#   filename:  enums.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal
+
+from pydantic import BaseModel
+from typing_extensions import TypeAlias
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class Color(BaseModel):
+    __root__: Literal['BLUE', 'GREEN', 'RED']
+
+
+class EmployeeShiftStatus(BaseModel):
+    """
+    Employee shift status
+    """
+
+    __root__: Literal['NOT_ON_SHIFT', 'ON_SHIFT']
+
+
+class EnumWithOneField(BaseModel):
+    __root__: Literal['FIELD']
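
With every enum rendered as a literal, each GraphQL enum becomes a root model wrapping a `Literal[...]` (pydantic v1 `__root__` syntax). This corresponds to:

    datamodel-codegen --input enums.graphql --input-file-type graphql --enum-field-as-literal all
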
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/enum_literals_one.py 0.45.0-1/tests/data/expected/main/graphql/enum_literals_one.py
--- 0.26.4-3/tests/data/expected/main/graphql/enum_literals_one.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/graphql/enum_literals_one.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,41 @@
+# generated by datamodel-codegen:
+#   filename:  enums.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Literal
+
+from pydantic import BaseModel
+from typing_extensions import TypeAlias
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class Color(Enum):
+    BLUE = 'BLUE'
+    GREEN = 'GREEN'
+    RED = 'RED'
+
+
+class EmployeeShiftStatus(Enum):
+    """
+    Employee shift status
+    """
+
+    NOT_ON_SHIFT = 'NOT_ON_SHIFT'
+    ON_SHIFT = 'ON_SHIFT'
+
+
+class EnumWithOneField(BaseModel):
+    __root__: Literal['FIELD']
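
The `one` mode is the middle ground: multi-member enums stay real `Enum` classes, while single-member enums like `EnumWithOneField` collapse to a `Literal`:

    datamodel-codegen --input enums.graphql --input-file-type graphql --enum-field-as-literal one
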
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/enums.py 0.45.0-1/tests/data/expected/main/graphql/enums.py
--- 0.26.4-3/tests/data/expected/main/graphql/enums.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/graphql/enums.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,39 @@
+# generated by datamodel-codegen:
+#   filename:  enums.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+from typing_extensions import TypeAlias
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class Color(Enum):
+    BLUE = 'BLUE'
+    GREEN = 'GREEN'
+    RED = 'RED'
+
+
+class EmployeeShiftStatus(Enum):
+    """
+    Employee shift status
+    """
+
+    NOT_ON_SHIFT = 'NOT_ON_SHIFT'
+    ON_SHIFT = 'ON_SHIFT'
+
+
+class EnumWithOneField(Enum):
+    FIELD = 'FIELD'
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/enums_no_specialized.py 0.45.0-1/tests/data/expected/main/graphql/enums_no_specialized.py
--- 0.26.4-3/tests/data/expected/main/graphql/enums_no_specialized.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/graphql/enums_no_specialized.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,38 @@
+# generated by datamodel-codegen:
+#   filename:  enums.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import TypeAlias
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class Color(Enum):
+    BLUE = 'BLUE'
+    GREEN = 'GREEN'
+    RED = 'RED'
+
+
+class EmployeeShiftStatus(Enum):
+    """
+    Employee shift status
+    """
+
+    NOT_ON_SHIFT = 'NOT_ON_SHIFT'
+    ON_SHIFT = 'ON_SHIFT'
+
+
+class EnumWithOneField(Enum):
+    FIELD = 'FIELD'
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/enums_specialized.py 0.45.0-1/tests/data/expected/main/graphql/enums_specialized.py
--- 0.26.4-3/tests/data/expected/main/graphql/enums_specialized.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/graphql/enums_specialized.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,38 @@
+# generated by datamodel-codegen:
+#   filename:  enums.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import StrEnum
+from typing import TypeAlias
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class Color(StrEnum):
+    BLUE = 'BLUE'
+    GREEN = 'GREEN'
+    RED = 'RED'
+
+
+class EmployeeShiftStatus(StrEnum):
+    """
+    Employee shift status
+    """
+
+    NOT_ON_SHIFT = 'NOT_ON_SHIFT'
+    ON_SHIFT = 'ON_SHIFT'
+
+
+class EnumWithOneField(StrEnum):
+    FIELD = 'FIELD'
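
With a 3.11+ target the generator can use `enum.StrEnum` directly, so members compare equal to their string values without the `str, Enum` double inheritance; the `--no-use-specialized-enum` fixture above shows the opt-out that falls back to plain `Enum`.
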
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/enums_typed_dict.py 0.45.0-1/tests/data/expected/main/graphql/enums_typed_dict.py
--- 0.26.4-3/tests/data/expected/main/graphql/enums_typed_dict.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/graphql/enums_typed_dict.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  enums.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal
+
+from typing_extensions import TypeAlias
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+Color: TypeAlias = Literal['BLUE', 'GREEN', 'RED']
+
+
+EmployeeShiftStatus: TypeAlias = Literal['NOT_ON_SHIFT', 'ON_SHIFT']
+"""
+Employee shift status
+"""
+
+
+EnumWithOneField: TypeAlias = Literal['FIELD']
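
Under TypedDict-style output there is no enum machinery at all: every GraphQL enum flattens to a `Literal` alias. This matches the TypedDict model type:

    datamodel-codegen --input enums.graphql --input-file-type graphql --output-model-type typing.TypedDict
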
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/enums_using_subclass.py 0.45.0-1/tests/data/expected/main/graphql/enums_using_subclass.py
--- 0.26.4-3/tests/data/expected/main/graphql/enums_using_subclass.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/graphql/enums_using_subclass.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,39 @@
+# generated by datamodel-codegen:
+#   filename:  enums.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+from typing_extensions import TypeAlias
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class Color(str, Enum):
+    BLUE = 'BLUE'
+    GREEN = 'GREEN'
+    RED = 'RED'
+
+
+class EmployeeShiftStatus(str, Enum):
+    """
+    Employee shift status
+    """
+
+    NOT_ON_SHIFT = 'NOT_ON_SHIFT'
+    ON_SHIFT = 'ON_SHIFT'
+
+
+class EnumWithOneField(str, Enum):
+    FIELD = 'FIELD'
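
The subclass variant mixes `str` into each enum so members can be used directly where strings are expected on any Python version, via `--use-subclass-enum`:

    datamodel-codegen --input enums.graphql --input-file-type graphql --use-subclass-enum
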
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/field_aliases.py 0.45.0-1/tests/data/expected/main/graphql/field_aliases.py
--- 0.26.4-3/tests/data/expected/main/graphql/field_aliases.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/graphql/field_aliases.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  field-aliases.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, Optional
+
+from pydantic import BaseModel, Field
+from typing_extensions import TypeAlias
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+DateTime: TypeAlias = str
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class DateTimePeriod(BaseModel):
+    periodFrom: DateTime = Field(..., alias='from')
+    periodTo: DateTime = Field(..., alias='to')
+    typename__: Optional[Literal['DateTimePeriod']] = Field(
+        'DateTimePeriod', alias='__typename'
+    )
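
The generated attributes `periodFrom`/`periodTo` carry aliases back to the wire names `from` and `to` (the former being a Python keyword). Under pydantic v1, data is then supplied under the original names:

    period = DateTimePeriod.parse_obj({'from': '2024-01-01T00:00:00Z', 'to': '2024-01-02T00:00:00Z'})
    assert period.periodFrom == '2024-01-01T00:00:00Z'
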
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/simple_star_wars.py 0.45.0-1/tests/data/expected/main/graphql/simple_star_wars.py
--- 0.26.4-3/tests/data/expected/main/graphql/simple_star_wars.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/graphql/simple_star_wars.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,159 @@
+# generated by datamodel-codegen:
+#   filename:  simple-star-wars.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Literal, Optional
+
+from pydantic import BaseModel, Field
+from typing_extensions import TypeAlias
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+ID: TypeAlias = str
+"""
+The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as `"4"`) or integer (such as `4`) input value will be accepted as an ID.
+"""
+
+
+Int: TypeAlias = int
+"""
+The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class Film(BaseModel):
+    characters: List[Person]
+    characters_ids: List[ID]
+    director: String
+    episode_id: Int
+    id: ID
+    opening_crawl: String
+    planets: List[Planet]
+    planets_ids: List[ID]
+    producer: Optional[String] = None
+    release_date: String
+    species: List[Species]
+    species_ids: List[ID]
+    starships: List[Starship]
+    starships_ids: List[ID]
+    title: String
+    vehicles: List[Vehicle]
+    vehicles_ids: List[ID]
+    typename__: Optional[Literal['Film']] = Field('Film', alias='__typename')
+
+
+class Person(BaseModel):
+    birth_year: Optional[String] = None
+    eye_color: Optional[String] = None
+    films: List[Film]
+    films_ids: List[ID]
+    gender: Optional[String] = None
+    hair_color: Optional[String] = None
+    height: Optional[Int] = None
+    homeworld: Optional[Planet] = None
+    homeworld_id: Optional[ID] = None
+    id: ID
+    mass: Optional[Int] = None
+    name: String
+    skin_color: Optional[String] = None
+    species: List[Species]
+    species_ids: List[ID]
+    starships: List[Starship]
+    starships_ids: List[ID]
+    vehicles: List[Vehicle]
+    vehicles_ids: List[ID]
+    typename__: Optional[Literal['Person']] = Field('Person', alias='__typename')
+
+
+class Planet(BaseModel):
+    climate: Optional[String] = None
+    diameter: Optional[String] = None
+    films: List[Film]
+    films_ids: List[ID]
+    gravity: Optional[String] = None
+    id: ID
+    name: String
+    orbital_period: Optional[String] = None
+    population: Optional[String] = None
+    residents: List[Person]
+    residents_ids: List[ID]
+    rotation_period: Optional[String] = None
+    surface_water: Optional[String] = None
+    terrain: Optional[String] = None
+    typename__: Optional[Literal['Planet']] = Field('Planet', alias='__typename')
+
+
+class Species(BaseModel):
+    average_height: Optional[String] = None
+    average_lifespan: Optional[String] = None
+    classification: Optional[String] = None
+    designation: Optional[String] = None
+    eye_colors: Optional[String] = None
+    films: List[Film]
+    films_ids: List[ID]
+    hair_colors: Optional[String] = None
+    id: ID
+    language: Optional[String] = None
+    name: String
+    people: List[Person]
+    people_ids: List[ID]
+    skin_colors: Optional[String] = None
+    typename__: Optional[Literal['Species']] = Field('Species', alias='__typename')
+
+
+class Starship(BaseModel):
+    MGLT: Optional[String] = None
+    cargo_capacity: Optional[String] = None
+    consumables: Optional[String] = None
+    cost_in_credits: Optional[String] = None
+    crew: Optional[String] = None
+    films: List[Film]
+    films_ids: List[ID]
+    hyperdrive_rating: Optional[String] = None
+    id: ID
+    length: Optional[String] = None
+    manufacturer: Optional[String] = None
+    max_atmosphering_speed: Optional[String] = None
+    model: Optional[String] = None
+    name: String
+    passengers: Optional[String] = None
+    pilots: List[Person]
+    pilots_ids: List[ID]
+    starship_class: Optional[String] = None
+    typename__: Optional[Literal['Starship']] = Field('Starship', alias='__typename')
+
+
+class Vehicle(BaseModel):
+    cargo_capacity: Optional[String] = None
+    consumables: Optional[String] = None
+    cost_in_credits: Optional[String] = None
+    crew: Optional[String] = None
+    films: List[Film]
+    films_ids: List[ID]
+    id: ID
+    length: Optional[String] = None
+    manufacturer: Optional[String] = None
+    max_atmosphering_speed: Optional[String] = None
+    model: Optional[String] = None
+    name: String
+    passengers: Optional[String] = None
+    pilots: List[Person]
+    pilots_ids: List[ID]
+    vehicle_class: Optional[String] = None
+    typename__: Optional[Literal['Vehicle']] = Field('Vehicle', alias='__typename')
+
+
+Film.update_forward_refs()
+Person.update_forward_refs()
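
The trailing `update_forward_refs()` calls are required under pydantic v1: `Film` and `Person` reference each other (and later classes) through the postponed annotations enabled by `from __future__ import annotations`, and those string references can only be resolved once every class has been defined.
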
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/simple_star_wars_dataclass.py 0.45.0-1/tests/data/expected/main/graphql/simple_star_wars_dataclass.py
--- 0.26.4-3/tests/data/expected/main/graphql/simple_star_wars_dataclass.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/graphql/simple_star_wars_dataclass.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,161 @@
+# generated by datamodel-codegen:
+#   filename:  simple-star-wars.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import List, Literal, Optional
+
+from typing_extensions import TypeAlias
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+ID: TypeAlias = str
+"""
+The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as `"4"`) or integer (such as `4`) input value will be accepted as an ID.
+"""
+
+
+Int: TypeAlias = int
+"""
+The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+@dataclass
+class Film:
+    characters: List[Person]
+    characters_ids: List[ID]
+    director: String
+    episode_id: Int
+    id: ID
+    opening_crawl: String
+    planets: List[Planet]
+    planets_ids: List[ID]
+    release_date: String
+    species: List[Species]
+    species_ids: List[ID]
+    starships: List[Starship]
+    starships_ids: List[ID]
+    title: String
+    vehicles: List[Vehicle]
+    vehicles_ids: List[ID]
+    producer: Optional[String] = None
+    typename__: Optional[Literal['Film']] = 'Film'
+
+
+@dataclass
+class Person:
+    films: List[Film]
+    films_ids: List[ID]
+    id: ID
+    name: String
+    species: List[Species]
+    species_ids: List[ID]
+    starships: List[Starship]
+    starships_ids: List[ID]
+    vehicles: List[Vehicle]
+    vehicles_ids: List[ID]
+    birth_year: Optional[String] = None
+    eye_color: Optional[String] = None
+    gender: Optional[String] = None
+    hair_color: Optional[String] = None
+    height: Optional[Int] = None
+    homeworld: Optional[Planet] = None
+    homeworld_id: Optional[ID] = None
+    mass: Optional[Int] = None
+    skin_color: Optional[String] = None
+    typename__: Optional[Literal['Person']] = 'Person'
+
+
+@dataclass
+class Planet:
+    films: List[Film]
+    films_ids: List[ID]
+    id: ID
+    name: String
+    residents: List[Person]
+    residents_ids: List[ID]
+    climate: Optional[String] = None
+    diameter: Optional[String] = None
+    gravity: Optional[String] = None
+    orbital_period: Optional[String] = None
+    population: Optional[String] = None
+    rotation_period: Optional[String] = None
+    surface_water: Optional[String] = None
+    terrain: Optional[String] = None
+    typename__: Optional[Literal['Planet']] = 'Planet'
+
+
+@dataclass
+class Species:
+    films: List[Film]
+    films_ids: List[ID]
+    id: ID
+    name: String
+    people: List[Person]
+    people_ids: List[ID]
+    average_height: Optional[String] = None
+    average_lifespan: Optional[String] = None
+    classification: Optional[String] = None
+    designation: Optional[String] = None
+    eye_colors: Optional[String] = None
+    hair_colors: Optional[String] = None
+    language: Optional[String] = None
+    skin_colors: Optional[String] = None
+    typename__: Optional[Literal['Species']] = 'Species'
+
+
+@dataclass
+class Starship:
+    films: List[Film]
+    films_ids: List[ID]
+    id: ID
+    name: String
+    pilots: List[Person]
+    pilots_ids: List[ID]
+    MGLT: Optional[String] = None
+    cargo_capacity: Optional[String] = None
+    consumables: Optional[String] = None
+    cost_in_credits: Optional[String] = None
+    crew: Optional[String] = None
+    hyperdrive_rating: Optional[String] = None
+    length: Optional[String] = None
+    manufacturer: Optional[String] = None
+    max_atmosphering_speed: Optional[String] = None
+    model: Optional[String] = None
+    passengers: Optional[String] = None
+    starship_class: Optional[String] = None
+    typename__: Optional[Literal['Starship']] = 'Starship'
+
+
+@dataclass
+class Vehicle:
+    films: List[Film]
+    films_ids: List[ID]
+    id: ID
+    name: String
+    pilots: List[Person]
+    pilots_ids: List[ID]
+    cargo_capacity: Optional[String] = None
+    consumables: Optional[String] = None
+    cost_in_credits: Optional[String] = None
+    crew: Optional[String] = None
+    length: Optional[String] = None
+    manufacturer: Optional[String] = None
+    max_atmosphering_speed: Optional[String] = None
+    model: Optional[String] = None
+    passengers: Optional[String] = None
+    vehicle_class: Optional[String] = None
+    typename__: Optional[Literal['Vehicle']] = 'Vehicle'
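
Note the field reordering relative to the pydantic version above: plain dataclasses require fields without defaults to precede defaulted ones, so the `Optional[...] = None` members sink to the bottom of each class instead of staying in alphabetical order.
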
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/simple_star_wars_dataclass_arguments.py 0.45.0-1/tests/data/expected/main/graphql/simple_star_wars_dataclass_arguments.py
--- 0.26.4-3/tests/data/expected/main/graphql/simple_star_wars_dataclass_arguments.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/graphql/simple_star_wars_dataclass_arguments.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,161 @@
+# generated by datamodel-codegen:
+#   filename:  simple-star-wars.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import List, Literal, Optional
+
+from typing_extensions import TypeAlias
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+ID: TypeAlias = str
+"""
+The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as `"4"`) or integer (such as `4`) input value will be accepted as an ID.
+"""
+
+
+Int: TypeAlias = int
+"""
+The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+@dataclass(order=True, slots=True)
+class Film:
+    characters: List[Person]
+    characters_ids: List[ID]
+    director: String
+    episode_id: Int
+    id: ID
+    opening_crawl: String
+    planets: List[Planet]
+    planets_ids: List[ID]
+    release_date: String
+    species: List[Species]
+    species_ids: List[ID]
+    starships: List[Starship]
+    starships_ids: List[ID]
+    title: String
+    vehicles: List[Vehicle]
+    vehicles_ids: List[ID]
+    producer: Optional[String] = None
+    typename__: Optional[Literal['Film']] = 'Film'
+
+
+@dataclass(order=True, slots=True)
+class Person:
+    films: List[Film]
+    films_ids: List[ID]
+    id: ID
+    name: String
+    species: List[Species]
+    species_ids: List[ID]
+    starships: List[Starship]
+    starships_ids: List[ID]
+    vehicles: List[Vehicle]
+    vehicles_ids: List[ID]
+    birth_year: Optional[String] = None
+    eye_color: Optional[String] = None
+    gender: Optional[String] = None
+    hair_color: Optional[String] = None
+    height: Optional[Int] = None
+    homeworld: Optional[Planet] = None
+    homeworld_id: Optional[ID] = None
+    mass: Optional[Int] = None
+    skin_color: Optional[String] = None
+    typename__: Optional[Literal['Person']] = 'Person'
+
+
+@dataclass(order=True, slots=True)
+class Planet:
+    films: List[Film]
+    films_ids: List[ID]
+    id: ID
+    name: String
+    residents: List[Person]
+    residents_ids: List[ID]
+    climate: Optional[String] = None
+    diameter: Optional[String] = None
+    gravity: Optional[String] = None
+    orbital_period: Optional[String] = None
+    population: Optional[String] = None
+    rotation_period: Optional[String] = None
+    surface_water: Optional[String] = None
+    terrain: Optional[String] = None
+    typename__: Optional[Literal['Planet']] = 'Planet'
+
+
+@dataclass(order=True, slots=True)
+class Species:
+    films: List[Film]
+    films_ids: List[ID]
+    id: ID
+    name: String
+    people: List[Person]
+    people_ids: List[ID]
+    average_height: Optional[String] = None
+    average_lifespan: Optional[String] = None
+    classification: Optional[String] = None
+    designation: Optional[String] = None
+    eye_colors: Optional[String] = None
+    hair_colors: Optional[String] = None
+    language: Optional[String] = None
+    skin_colors: Optional[String] = None
+    typename__: Optional[Literal['Species']] = 'Species'
+
+
+@dataclass(order=True, slots=True)
+class Starship:
+    films: List[Film]
+    films_ids: List[ID]
+    id: ID
+    name: String
+    pilots: List[Person]
+    pilots_ids: List[ID]
+    MGLT: Optional[String] = None
+    cargo_capacity: Optional[String] = None
+    consumables: Optional[String] = None
+    cost_in_credits: Optional[String] = None
+    crew: Optional[String] = None
+    hyperdrive_rating: Optional[String] = None
+    length: Optional[String] = None
+    manufacturer: Optional[String] = None
+    max_atmosphering_speed: Optional[String] = None
+    model: Optional[String] = None
+    passengers: Optional[String] = None
+    starship_class: Optional[String] = None
+    typename__: Optional[Literal['Starship']] = 'Starship'
+
+
+@dataclass(order=True, slots=True)
+class Vehicle:
+    films: List[Film]
+    films_ids: List[ID]
+    id: ID
+    name: String
+    pilots: List[Person]
+    pilots_ids: List[ID]
+    cargo_capacity: Optional[String] = None
+    consumables: Optional[String] = None
+    cost_in_credits: Optional[String] = None
+    crew: Optional[String] = None
+    length: Optional[String] = None
+    manufacturer: Optional[String] = None
+    max_atmosphering_speed: Optional[String] = None
+    model: Optional[String] = None
+    passengers: Optional[String] = None
+    vehicle_class: Optional[String] = None
+    typename__: Optional[Literal['Vehicle']] = 'Vehicle'
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/simple_star_wars_dataclass_frozen_kw_only.py 0.45.0-1/tests/data/expected/main/graphql/simple_star_wars_dataclass_frozen_kw_only.py
--- 0.26.4-3/tests/data/expected/main/graphql/simple_star_wars_dataclass_frozen_kw_only.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/graphql/simple_star_wars_dataclass_frozen_kw_only.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,159 @@
+# generated by datamodel-codegen:
+#   filename:  simple-star-wars.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import List, Literal, Optional, TypeAlias
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+ID: TypeAlias = str
+"""
+The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as `"4"`) or integer (such as `4`) input value will be accepted as an ID.
+"""
+
+
+Int: TypeAlias = int
+"""
+The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+@dataclass(frozen=True, kw_only=True)
+class Film:
+    characters: List[Person]
+    characters_ids: List[ID]
+    director: String
+    episode_id: Int
+    id: ID
+    opening_crawl: String
+    planets: List[Planet]
+    planets_ids: List[ID]
+    release_date: String
+    species: List[Species]
+    species_ids: List[ID]
+    starships: List[Starship]
+    starships_ids: List[ID]
+    title: String
+    vehicles: List[Vehicle]
+    vehicles_ids: List[ID]
+    producer: Optional[String] = None
+    typename__: Optional[Literal['Film']] = 'Film'
+
+
+@dataclass(frozen=True, kw_only=True)
+class Person:
+    films: List[Film]
+    films_ids: List[ID]
+    id: ID
+    name: String
+    species: List[Species]
+    species_ids: List[ID]
+    starships: List[Starship]
+    starships_ids: List[ID]
+    vehicles: List[Vehicle]
+    vehicles_ids: List[ID]
+    birth_year: Optional[String] = None
+    eye_color: Optional[String] = None
+    gender: Optional[String] = None
+    hair_color: Optional[String] = None
+    height: Optional[Int] = None
+    homeworld: Optional[Planet] = None
+    homeworld_id: Optional[ID] = None
+    mass: Optional[Int] = None
+    skin_color: Optional[String] = None
+    typename__: Optional[Literal['Person']] = 'Person'
+
+
+@dataclass(frozen=True, kw_only=True)
+class Planet:
+    films: List[Film]
+    films_ids: List[ID]
+    id: ID
+    name: String
+    residents: List[Person]
+    residents_ids: List[ID]
+    climate: Optional[String] = None
+    diameter: Optional[String] = None
+    gravity: Optional[String] = None
+    orbital_period: Optional[String] = None
+    population: Optional[String] = None
+    rotation_period: Optional[String] = None
+    surface_water: Optional[String] = None
+    terrain: Optional[String] = None
+    typename__: Optional[Literal['Planet']] = 'Planet'
+
+
+@dataclass(frozen=True, kw_only=True)
+class Species:
+    films: List[Film]
+    films_ids: List[ID]
+    id: ID
+    name: String
+    people: List[Person]
+    people_ids: List[ID]
+    average_height: Optional[String] = None
+    average_lifespan: Optional[String] = None
+    classification: Optional[String] = None
+    designation: Optional[String] = None
+    eye_colors: Optional[String] = None
+    hair_colors: Optional[String] = None
+    language: Optional[String] = None
+    skin_colors: Optional[String] = None
+    typename__: Optional[Literal['Species']] = 'Species'
+
+
+@dataclass(frozen=True, kw_only=True)
+class Starship:
+    films: List[Film]
+    films_ids: List[ID]
+    id: ID
+    name: String
+    pilots: List[Person]
+    pilots_ids: List[ID]
+    MGLT: Optional[String] = None
+    cargo_capacity: Optional[String] = None
+    consumables: Optional[String] = None
+    cost_in_credits: Optional[String] = None
+    crew: Optional[String] = None
+    hyperdrive_rating: Optional[String] = None
+    length: Optional[String] = None
+    manufacturer: Optional[String] = None
+    max_atmosphering_speed: Optional[String] = None
+    model: Optional[String] = None
+    passengers: Optional[String] = None
+    starship_class: Optional[String] = None
+    typename__: Optional[Literal['Starship']] = 'Starship'
+
+
+@dataclass(frozen=True, kw_only=True)
+class Vehicle:
+    films: List[Film]
+    films_ids: List[ID]
+    id: ID
+    name: String
+    pilots: List[Person]
+    pilots_ids: List[ID]
+    cargo_capacity: Optional[String] = None
+    consumables: Optional[String] = None
+    cost_in_credits: Optional[String] = None
+    crew: Optional[String] = None
+    length: Optional[String] = None
+    manufacturer: Optional[String] = None
+    max_atmosphering_speed: Optional[String] = None
+    model: Optional[String] = None
+    passengers: Optional[String] = None
+    vehicle_class: Optional[String] = None
+    typename__: Optional[Literal['Vehicle']] = 'Vehicle'
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/simple_star_wars_extra_fields_allow.py 0.45.0-1/tests/data/expected/main/graphql/simple_star_wars_extra_fields_allow.py
--- 0.26.4-3/tests/data/expected/main/graphql/simple_star_wars_extra_fields_allow.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/graphql/simple_star_wars_extra_fields_allow.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,177 @@
+# generated by datamodel-codegen:
+#   filename:  simple-star-wars.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Literal, Optional
+
+from pydantic import BaseModel, Extra, Field
+from typing_extensions import TypeAlias
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+ID: TypeAlias = str
+"""
+The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as `"4"`) or integer (such as `4`) input value will be accepted as an ID.
+"""
+
+
+Int: TypeAlias = int
+"""
+The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class Film(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    characters: List[Person]
+    characters_ids: List[ID]
+    director: String
+    episode_id: Int
+    id: ID
+    opening_crawl: String
+    planets: List[Planet]
+    planets_ids: List[ID]
+    producer: Optional[String] = None
+    release_date: String
+    species: List[Species]
+    species_ids: List[ID]
+    starships: List[Starship]
+    starships_ids: List[ID]
+    title: String
+    vehicles: List[Vehicle]
+    vehicles_ids: List[ID]
+    typename__: Optional[Literal['Film']] = Field('Film', alias='__typename')
+
+
+class Person(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    birth_year: Optional[String] = None
+    eye_color: Optional[String] = None
+    films: List[Film]
+    films_ids: List[ID]
+    gender: Optional[String] = None
+    hair_color: Optional[String] = None
+    height: Optional[Int] = None
+    homeworld: Optional[Planet] = None
+    homeworld_id: Optional[ID] = None
+    id: ID
+    mass: Optional[Int] = None
+    name: String
+    skin_color: Optional[String] = None
+    species: List[Species]
+    species_ids: List[ID]
+    starships: List[Starship]
+    starships_ids: List[ID]
+    vehicles: List[Vehicle]
+    vehicles_ids: List[ID]
+    typename__: Optional[Literal['Person']] = Field('Person', alias='__typename')
+
+
+class Planet(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    climate: Optional[String] = None
+    diameter: Optional[String] = None
+    films: List[Film]
+    films_ids: List[ID]
+    gravity: Optional[String] = None
+    id: ID
+    name: String
+    orbital_period: Optional[String] = None
+    population: Optional[String] = None
+    residents: List[Person]
+    residents_ids: List[ID]
+    rotation_period: Optional[String] = None
+    surface_water: Optional[String] = None
+    terrain: Optional[String] = None
+    typename__: Optional[Literal['Planet']] = Field('Planet', alias='__typename')
+
+
+class Species(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    average_height: Optional[String] = None
+    average_lifespan: Optional[String] = None
+    classification: Optional[String] = None
+    designation: Optional[String] = None
+    eye_colors: Optional[String] = None
+    films: List[Film]
+    films_ids: List[ID]
+    hair_colors: Optional[String] = None
+    id: ID
+    language: Optional[String] = None
+    name: String
+    people: List[Person]
+    people_ids: List[ID]
+    skin_colors: Optional[String] = None
+    typename__: Optional[Literal['Species']] = Field('Species', alias='__typename')
+
+
+class Starship(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    MGLT: Optional[String] = None
+    cargo_capacity: Optional[String] = None
+    consumables: Optional[String] = None
+    cost_in_credits: Optional[String] = None
+    crew: Optional[String] = None
+    films: List[Film]
+    films_ids: List[ID]
+    hyperdrive_rating: Optional[String] = None
+    id: ID
+    length: Optional[String] = None
+    manufacturer: Optional[String] = None
+    max_atmosphering_speed: Optional[String] = None
+    model: Optional[String] = None
+    name: String
+    passengers: Optional[String] = None
+    pilots: List[Person]
+    pilots_ids: List[ID]
+    starship_class: Optional[String] = None
+    typename__: Optional[Literal['Starship']] = Field('Starship', alias='__typename')
+
+
+class Vehicle(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    cargo_capacity: Optional[String] = None
+    consumables: Optional[String] = None
+    cost_in_credits: Optional[String] = None
+    crew: Optional[String] = None
+    films: List[Film]
+    films_ids: List[ID]
+    id: ID
+    length: Optional[String] = None
+    manufacturer: Optional[String] = None
+    max_atmosphering_speed: Optional[String] = None
+    model: Optional[String] = None
+    name: String
+    passengers: Optional[String] = None
+    pilots: List[Person]
+    pilots_ids: List[ID]
+    vehicle_class: Optional[String] = None
+    typename__: Optional[Literal['Vehicle']] = Field('Vehicle', alias='__typename')
+
+
+Film.update_forward_refs()
+Person.update_forward_refs()
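
This variant layers `extra = Extra.allow` onto every model Config so unrecognized response keys are retained as attributes instead of being ignored, matching the `--allow-extra-fields` option:

    datamodel-codegen --input simple-star-wars.graphql --input-file-type graphql --allow-extra-fields
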
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/type_alias.py 0.45.0-1/tests/data/expected/main/graphql/type_alias.py
--- 0.26.4-3/tests/data/expected/main/graphql/type_alias.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/graphql/type_alias.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,57 @@
+# generated by datamodel-codegen:
+#   filename:  type_alias.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+from typing_extensions import TypeAlias
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+Int: TypeAlias = int
+"""
+The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.
+"""
+
+
+SimpleString: TypeAlias = str
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class Person(BaseModel):
+    age: Int
+    name: String
+    typename__: Optional[Literal['Person']] = Field('Person', alias='__typename')
+
+
+class Pet(BaseModel):
+    name: String
+    type: String
+    typename__: Optional[Literal['Pet']] = Field('Pet', alias='__typename')
+
+
+UnionType: TypeAlias = Union[
+    'Person',
+    'Pet',
+]
+
+
+class ModelWithTypeAliasField(BaseModel):
+    simple_field: Optional[SimpleString] = None
+    string_field: Optional[String] = None
+    union_field: Optional[UnionType] = None
+    typename__: Optional[Literal['ModelWithTypeAliasField']] = Field(
+        'ModelWithTypeAliasField', alias='__typename'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/type_alias_py312.py 0.45.0-1/tests/data/expected/main/graphql/type_alias_py312.py
--- 0.26.4-3/tests/data/expected/main/graphql/type_alias_py312.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/graphql/type_alias_py312.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,56 @@
+# generated by datamodel-codegen:
+#   filename:  type_alias.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+
+type Boolean = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+type Int = int
+"""
+The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.
+"""
+
+
+type SimpleString = str
+
+
+type String = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class Person(BaseModel):
+    age: Int
+    name: String
+    typename__: Optional[Literal['Person']] = Field('Person', alias='__typename')
+
+
+class Pet(BaseModel):
+    name: String
+    type: String
+    typename__: Optional[Literal['Pet']] = Field('Pet', alias='__typename')
+
+
+type UnionType = Union[
+    'Person',
+    'Pet',
+]
+
+
+class ModelWithTypeAliasField(BaseModel):
+    simple_field: Optional[SimpleString] = None
+    string_field: Optional[String] = None
+    union_field: Optional[UnionType] = None
+    typename__: Optional[Literal['ModelWithTypeAliasField']] = Field(
+        'ModelWithTypeAliasField', alias='__typename'
+    )
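Editor's note: the py312 variant of the same fixture uses PEP 695 `type` statements instead, which need no import and evaluate their right-hand side lazily, so forward references no longer need quoting. A standalone illustration (Python 3.12+ only; the names below are hypothetical):

# `type` creates a TypeAliasType; its value is only evaluated when
# __value__ is first accessed, so it may mention classes defined
# later in the module.
type UnionType = Person | Pet


class Person:
    pass


class Pet:
    pass


print(UnionType)            # UnionType
print(UnionType.__value__)  # the resolved union of Person and Pet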
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/union.py 0.45.0-1/tests/data/expected/main/graphql/union.py
--- 0.26.4-3/tests/data/expected/main/graphql/union.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/graphql/union.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,60 @@
+# generated by datamodel-codegen:
+#   filename:  union.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+from typing_extensions import TypeAlias
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+ID: TypeAlias = str
+"""
+The `ID` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as `"4"`) or integer (such as `4`) input value will be accepted as an ID.
+"""
+
+
+Int: TypeAlias = int
+"""
+The `Int` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class IResource(BaseModel):
+    id: ID
+    typename__: Optional[Literal['IResource']] = Field('IResource', alias='__typename')
+
+
+class Car(IResource):
+    id: ID
+    passengerCapacity: Int
+    typename__: Optional[Literal['Car']] = Field('Car', alias='__typename')
+
+
+class Employee(IResource):
+    firstName: Optional[String] = None
+    id: ID
+    lastName: Optional[String] = None
+    typename__: Optional[Literal['Employee']] = Field('Employee', alias='__typename')
+
+
+Resource: TypeAlias = Union[
+    'Car',
+    'Employee',
+]
+
+
+TechnicalResource: TypeAlias = Car
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/use_standard_collections.py 0.45.0-1/tests/data/expected/main/graphql/use_standard_collections.py
--- 0.26.4-3/tests/data/expected/main/graphql/use_standard_collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/graphql/use_standard_collections.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,28 @@
+# generated by datamodel-codegen:
+#   filename:  use-standard-collections.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, Optional
+
+from pydantic import BaseModel, Field
+from typing_extensions import TypeAlias
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class A(BaseModel):
+    field: String
+    listField: list[String]
+    listListField: list[list[String]]
+    typename__: Optional[Literal['A']] = Field('A', alias='__typename')
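Editor's note: with --use-standard-collections the generator leans on PEP 585 (Python 3.9+), where the builtin containers are subscriptable, so no `from typing import List` is emitted. A minimal sketch with a hypothetical model:

from pydantic import BaseModel


class Matrix(BaseModel):
    # builtin generics work at runtime on 3.9+, no typing import needed
    rows: list[list[float]]


print(Matrix(rows=[[1.0, 2.0], [3.5]]))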
diff -pruN 0.26.4-3/tests/data/expected/main/graphql/use_union_operator.py 0.45.0-1/tests/data/expected/main/graphql/use_union_operator.py
--- 0.26.4-3/tests/data/expected/main/graphql/use_union_operator.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/graphql/use_union_operator.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,37 @@
+# generated by datamodel-codegen:
+#   filename:  use-union-operator.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Literal
+
+from pydantic import BaseModel, Field
+from typing_extensions import TypeAlias
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class A(BaseModel):
+    field: String
+    listField: List[String]
+    listListField: List[List[String]]
+    listListOptionalField: List[List[String | None]]
+    listOptionalField: List[String | None]
+    listOptionalListField: List[List[String] | None]
+    listOptionalListOptionalField: List[List[String | None] | None]
+    optionalField: String | None = None
+    optionalListListField: List[List[String]] | None = None
+    optionalListListOptionalField: List[List[String | None]] | None = None
+    optionalListOptionalField: List[String | None] | None = None
+    optionalListOptionalListField: List[List[String] | None] | None = None
+    typename__: Literal['A'] | None = Field('A', alias='__typename')
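Editor's note: the --use-union-operator variant writes PEP 604 unions (`X | None`) in place of Optional; since pydantic evaluates the annotations, this output assumes Python 3.10+. A cut-down sketch of the fixture's model:

from pydantic import BaseModel


class A(BaseModel):
    field: str
    optionalField: str | None = None


a = A(field='x')
print(a.optionalField)  # None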
diff -pruN 0.26.4-3/tests/data/expected/main/json/general.py 0.45.0-1/tests/data/expected/main/json/general.py
--- 0.26.4-3/tests/data/expected/main/json/general.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/json/general.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  pet.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    name: str
+    age: int
+
+
+class Model(BaseModel):
+    Pet: Pet
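Editor's note: this fixture is generated from raw JSON data rather than a schema, with field types inferred from the sample values. A sketch of producing something similar programmatically, assuming the documented datamodel_code_generator.generate() helper and its InputFileType enum:

from pathlib import Path

from datamodel_code_generator import InputFileType, generate

# Raw JSON data (not a schema): field types are inferred from values.
generate(
    '{"Pet": {"name": "dog", "age": 3}}',
    input_file_type=InputFileType.Json,
    input_filename='pet.json',
    output=Path('general.py'),
)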
diff -pruN 0.26.4-3/tests/data/expected/main/json/json_array_include_null.py 0.45.0-1/tests/data/expected/main/json/json_array_include_null.py
--- 0.26.4-3/tests/data/expected/main/json/json_array_include_null.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/json/json_array_include_null.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  array_include_null.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class Item(BaseModel):
+    oofield: Optional[List[int]]
+
+
+class Model(BaseModel):
+    items: List[Item]
diff -pruN 0.26.4-3/tests/data/expected/main/json/json_reuse_model.py 0.45.0-1/tests/data/expected/main/json/json_reuse_model.py
--- 0.26.4-3/tests/data/expected/main/json/json_reuse_model.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/json/json_reuse_model.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,27 @@
+# generated by datamodel-codegen:
+#   filename:  duplicate_models.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class ArmRight(BaseModel):
+    Joint_1: int = Field(..., alias='Joint 1')
+    Joint_2: int = Field(..., alias='Joint 2')
+    Joint_3: int = Field(..., alias='Joint 3')
+
+
+class ArmLeft(ArmRight):
+    pass
+
+
+class Head(BaseModel):
+    Joint_1: int = Field(..., alias='Joint 1')
+
+
+class Model(BaseModel):
+    Arm_Right: ArmRight = Field(..., alias='Arm Right')
+    Arm_Left: ArmLeft = Field(..., alias='Arm Left')
+    Head: Head
diff -pruN 0.26.4-3/tests/data/expected/main/json/json_reuse_model_pydantic2.py 0.45.0-1/tests/data/expected/main/json/json_reuse_model_pydantic2.py
--- 0.26.4-3/tests/data/expected/main/json/json_reuse_model_pydantic2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/json/json_reuse_model_pydantic2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,26 @@
+# generated by datamodel-codegen:
+#   filename:  duplicate_models.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class ArmRight(BaseModel):
+    Joint_1: int = Field(..., alias='Joint 1')
+    Joint_2: int = Field(..., alias='Joint 2')
+    Joint_3: int = Field(..., alias='Joint 3')
+
+
+ArmLeft = ArmRight
+
+
+class Head(BaseModel):
+    Joint_1: int = Field(..., alias='Joint 1')
+
+
+class Model(BaseModel):
+    Arm_Right: ArmRight = Field(..., alias='Arm Right')
+    Arm_Left: ArmLeft = Field(..., alias='Arm Left')
+    Head_1: Head = Field(..., alias='Head')
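Editor's note: compare this pydantic v2 fixture with the v1 fixture above. Under model reuse, the duplicate shape becomes an empty subclass (`class ArmLeft(ArmRight): pass`) in the v1 output but a bare alias here, so both names refer to literally the same class. A quick check of the alias form:

from pydantic import BaseModel


class ArmRight(BaseModel):
    joint: int


ArmLeft = ArmRight  # alias, not a subclass

print(ArmLeft is ArmRight)                     # True
print(isinstance(ArmLeft(joint=1), ArmRight))  # True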
diff -pruN 0.26.4-3/tests/data/expected/main/json/json_snake_case_field.py 0.45.0-1/tests/data/expected/main/json/json_snake_case_field.py
--- 0.26.4-3/tests/data/expected/main/json/json_snake_case_field.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/json/json_snake_case_field.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  snake_case.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class Model(BaseModel):
+    snake_case: str
+    camel_case: str = Field(..., alias='camelCase')
+    kebab_case: str = Field(..., alias='kebab-case')
+    pascal_case: str = Field(..., alias='PascalCase')
+    upper_case: str = Field(..., alias='UPPER_CASE')
+    dev_info: str = Field(..., alias='Dev_Info')
+    clone_device: str = Field(..., alias='CLONE_Device')
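Editor's note: the snake-case output pairs each renamed attribute with a Field alias so the wire format is preserved; data is parsed by its original keys and can be dumped back the same way. A trimmed round-trip sketch (pydantic v1 spellings):

from pydantic import BaseModel, Field


class Model(BaseModel):
    camel_case: str = Field(..., alias='camelCase')


m = Model.parse_obj({'camelCase': 'value'})
print(m.camel_case)           # 'value'
print(m.dict(by_alias=True))  # {'camelCase': 'value'}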
diff -pruN 0.26.4-3/tests/data/expected/main/json/simple_json_snake_case_field.py 0.45.0-1/tests/data/expected/main/json/simple_json_snake_case_field.py
--- 0.26.4-3/tests/data/expected/main/json/simple_json_snake_case_field.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/json/simple_json_snake_case_field.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  simple.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class Model(BaseModel):
+    pet_name: str = Field(..., alias='petName')
diff -pruN 0.26.4-3/tests/data/expected/main/json/space_and_special_characters.py 0.45.0-1/tests/data/expected/main/json/space_and_special_characters.py
--- 0.26.4-3/tests/data/expected/main/json/space_and_special_characters.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/json/space_and_special_characters.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,46 @@
+# generated by datamodel-codegen:
+#   filename:  space_and_special_characters.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class InitialParameters(BaseModel):
+    V1: int
+    V2: int
+
+
+class Data(BaseModel):
+    Length__m_: float = Field(..., alias='Length (m)')
+    Symmetric_deviation____: float = Field(..., alias='Symmetric deviation (%)')
+    Total_running_time__s_: int = Field(..., alias='Total running time (s)')
+    Mass__kg_: float = Field(..., alias='Mass (kg)')
+    Initial_parameters: InitialParameters = Field(..., alias='Initial parameters')
+    class_: str = Field(..., alias='class')
+
+
+class Values(BaseModel):
+    field_1_Step: str = Field(..., alias='1 Step')
+    field_2_Step: str = Field(..., alias='2 Step')
+
+
+class Recursive1(BaseModel):
+    value: float
+
+
+class Sub(BaseModel):
+    recursive: Recursive1
+
+
+class Recursive(BaseModel):
+    sub: Sub
+
+
+class Model(BaseModel):
+    Serial_Number: str = Field(..., alias='Serial Number')
+    Timestamp: str
+    Data: Data
+    values: Values
+    recursive: Recursive
diff -pruN 0.26.4-3/tests/data/expected/main/json/typed_dict_space_and_special_characters.py 0.45.0-1/tests/data/expected/main/json/typed_dict_space_and_special_characters.py
--- 0.26.4-3/tests/data/expected/main/json/typed_dict_space_and_special_characters.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/json/typed_dict_space_and_special_characters.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,58 @@
+# generated by datamodel-codegen:
+#   filename:  space_and_special_characters.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import TypedDict
+
+
+class InitialParameters(TypedDict):
+    V1: int
+    V2: int
+
+
+Data = TypedDict(
+    'Data',
+    {
+        'Length (m)': float,
+        'Symmetric deviation (%)': float,
+        'Total running time (s)': int,
+        'Mass (kg)': float,
+        'Initial parameters': InitialParameters,
+        'class': str,
+    },
+)
+
+
+Values = TypedDict(
+    'Values',
+    {
+        '1 Step': str,
+        '2 Step': str,
+    },
+)
+
+
+class Recursive1(TypedDict):
+    value: float
+
+
+class Sub(TypedDict):
+    recursive: Recursive1
+
+
+class Recursive(TypedDict):
+    sub: Sub
+
+
+Model = TypedDict(
+    'Model',
+    {
+        'Serial Number': str,
+        'Timestamp': str,
+        'Data': Data,
+        'values': Values,
+        'recursive': Recursive,
+    },
+)
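Editor's note: keys such as 'Serial Number', '1 Step', or 'class' are not valid Python identifiers, so the generator falls back to the functional TypedDict form for those dictionaries; values are then read by subscription rather than attribute access. A minimal usage sketch:

from typing import TypedDict

# Functional form: the only way to spell non-identifier keys.
Data = TypedDict('Data', {'Length (m)': float, 'class': str})

row: Data = {'Length (m)': 3.5, 'class': 'test'}
print(row['Length (m)'])  # 3.5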
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/alias_import_alias/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/alias_import_alias/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/alias_import_alias/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/alias_import_alias/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  alias_import_alias
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/alias_import_alias/a_schema.py 0.45.0-1/tests/data/expected/main/jsonschema/alias_import_alias/a_schema.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/alias_import_alias/a_schema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/alias_import_alias/a_schema.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  a.schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class TypeA(BaseModel):
+    value: str
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/alias_import_alias/b_schema.py 0.45.0-1/tests/data/expected/main/jsonschema/alias_import_alias/b_schema.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/alias_import_alias/b_schema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/alias_import_alias/b_schema.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  b.schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+from . import a_schema as a_schema_1
+
+
+class Container(BaseModel):
+    a_schema: a_schema_1.TypeA
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/alias_import_alias/date_schema.py 0.45.0-1/tests/data/expected/main/jsonschema/alias_import_alias/date_schema.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/alias_import_alias/date_schema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/alias_import_alias/date_schema.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  date.schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from datetime import date as date_aliased
+from datetime import datetime as datetime_aliased
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class TypeDate(BaseModel):
+    date: Optional[date_aliased]
+    datetime: Optional[datetime_aliased] = None
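Editor's note: the aliased imports exist because the model reuses 'date' and 'datetime' as field names; binding the types under different names keeps the annotations resolvable instead of being shadowed by the fields. A minimal illustration:

from datetime import date as date_aliased

from pydantic import BaseModel


class TypeDate(BaseModel):
    # 'date' is the field name; date_aliased is the (unshadowed) type.
    date: date_aliased


print(TypeDate(date='2020-01-01'))  # the string is coerced to a date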
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/all_exports_multi_file/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/all_exports_multi_file/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/all_exports_multi_file/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/all_exports_multi_file/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  all_exports_multi_file
+
+from __future__ import annotations
+
+from .order import Order
+from .product import Product
+from .user import User
+
+__all__ = [
+    "Order",
+    "Product",
+    "User",
+]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/all_exports_multi_file/order.py 0.45.0-1/tests/data/expected/main/jsonschema/all_exports_multi_file/order.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/all_exports_multi_file/order.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/all_exports_multi_file/order.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  order.json
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class Order(BaseModel):
+    id: str
+    user_id: str
+    product_id: str
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/all_exports_multi_file/product.py 0.45.0-1/tests/data/expected/main/jsonschema/all_exports_multi_file/product.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/all_exports_multi_file/product.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/all_exports_multi_file/product.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  product.json
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class Product(BaseModel):
+    id: str
+    price: float
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/all_exports_multi_file/user.py 0.45.0-1/tests/data/expected/main/jsonschema/all_exports_multi_file/user.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/all_exports_multi_file/user.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/all_exports_multi_file/user.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  user.json
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class User(BaseModel):
+    id: str
+    name: str
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/all_of_any_of/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/all_of_any_of/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/all_of_any_of/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/all_of_any_of/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  all_of_any_of
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/all_of_any_of/direct.py 0.45.0-1/tests/data/expected/main/jsonschema/all_of_any_of/direct.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/all_of_any_of/direct.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/all_of_any_of/direct.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,37 @@
+# generated by datamodel-codegen:
+#   filename:  direct.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Union
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class Target1(BaseModel):
+    first: str
+
+
+class Target2(BaseModel):
+    second: str
+
+
+class Target3(BaseModel):
+    third: str
+
+
+class Target4(Target1, Target3):
+    pass
+
+
+class Target5(Target2, Target3):
+    pass
+
+
+class Target(BaseModel):
+    __root__: Union[Target4, Target5]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/all_of_any_of/reference.py 0.45.0-1/tests/data/expected/main/jsonschema/all_of_any_of/reference.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/all_of_any_of/reference.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/all_of_any_of/reference.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,37 @@
+# generated by datamodel-codegen:
+#   filename:  reference.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Union
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class First(BaseModel):
+    first: str
+
+
+class Second(BaseModel):
+    second: str
+
+
+class Target(BaseModel):
+    third: str
+
+
+class Target8(First, Target):
+    pass
+
+
+class Target9(Second, Target):
+    pass
+
+
+class Target6(BaseModel):
+    __root__: Union[Target8, Target9]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/all_of_any_of_base_class_ref.py 0.45.0-1/tests/data/expected/main/jsonschema/all_of_any_of_base_class_ref.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/all_of_any_of_base_class_ref.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/all_of_any_of_base_class_ref.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,74 @@
+# generated by datamodel-codegen:
+#   filename:  all_of_any_of_base_class_ref.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Union
+
+from pydantic import BaseModel, Field, confloat
+
+
+class MapState1(BaseModel):
+    map_view_mode: str = Field("MODE_2D", alias="mapViewMode", const=True)
+
+
+class MapState2(BaseModel):
+    latitude: Latitude
+    longitude: Longitude
+    zoom: Optional[Zoom] = Field(default_factory=lambda: Zoom.parse_obj(0))
+    bearing: Optional[Bearing] = None
+    pitch: Pitch
+    drag_rotate: Optional[DragRotate] = Field(None, alias="dragRotate")
+    map_split_mode: str = Field("SWIPE_COMPARE", alias="mapSplitMode", const=True)
+    is_split: bool = Field(True, alias="isSplit", const=True)
+
+
+class MapState3(BaseModel):
+    pass
+
+
+class MapState4(MapState1, MapState3):
+    pass
+
+
+class MapState5(MapState2, MapState3):
+    pass
+
+
+class MapState6(MapState4):
+    pass
+
+
+class MapState7(MapState5):
+    pass
+
+
+class MapState(BaseModel):
+    __root__: Union[MapState4, MapState5, MapState6, MapState7] = Field(
+        ..., title="MapState"
+    )
+
+
+class Bearing(BaseModel):
+    __root__: float
+
+
+class DragRotate(BaseModel):
+    __root__: bool
+
+
+class Latitude(BaseModel):
+    __root__: confloat(ge=-90.0, le=90.0)
+
+
+class Longitude(BaseModel):
+    __root__: confloat(ge=-180.0, le=180.0)
+
+
+class Pitch(BaseModel):
+    __root__: confloat(ge=0.0, lt=90.0)
+
+
+class Zoom(BaseModel):
+    __root__: confloat(ge=0.0, le=25.0)
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/all_of_merge_boolean_property.py 0.45.0-1/tests/data/expected/main/jsonschema/all_of_merge_boolean_property.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/all_of_merge_boolean_property.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/all_of_merge_boolean_property.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  all_of_merge_boolean_property.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Optional
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    data: Optional[Any] = None
+
+
+class Data(BaseModel):
+    value: Optional[str] = None
+
+
+class Base1(BaseModel):
+    data: Optional[Data] = None
+
+
+class Base2(BaseModel):
+    data: Optional[Any] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/all_of_merge_same_property.py 0.45.0-1/tests/data/expected/main/jsonschema/all_of_merge_same_property.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/all_of_merge_same_property.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/all_of_merge_same_property.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,34 @@
+# generated by datamodel-codegen:
+#   filename:  all_of_merge_same_property.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Links(BaseModel):
+    self: Optional[str] = None
+    collection: Optional[str] = None
+
+
+class Model(BaseModel):
+    links: Links
+
+
+class Links1(BaseModel):
+    self: Optional[str] = None
+
+
+class SelfLink(BaseModel):
+    links: Links1
+
+
+class Links2(BaseModel):
+    collection: Optional[str] = None
+
+
+class CollectionLink(BaseModel):
+    links: Links2
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/all_of_one_of/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/all_of_one_of/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/all_of_one_of/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/all_of_one_of/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  all_of_one_of
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/all_of_one_of/direct.py 0.45.0-1/tests/data/expected/main/jsonschema/all_of_one_of/direct.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/all_of_one_of/direct.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/all_of_one_of/direct.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,37 @@
+# generated by datamodel-codegen:
+#   filename:  direct.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Union
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class Target1(BaseModel):
+    first: str
+
+
+class Target2(BaseModel):
+    second: str
+
+
+class Target3(BaseModel):
+    third: str
+
+
+class Target4(Target1, Target3):
+    pass
+
+
+class Target5(Target2, Target3):
+    pass
+
+
+class Target(BaseModel):
+    __root__: Union[Target4, Target5]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/all_of_one_of/reference.py 0.45.0-1/tests/data/expected/main/jsonschema/all_of_one_of/reference.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/all_of_one_of/reference.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/all_of_one_of/reference.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,37 @@
+# generated by datamodel-codegen:
+#   filename:  reference.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Union
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class First(BaseModel):
+    first: str
+
+
+class Second(BaseModel):
+    second: str
+
+
+class Target(BaseModel):
+    third: str
+
+
+class Target8(First, Target):
+    pass
+
+
+class Target9(Second, Target):
+    pass
+
+
+class Target6(BaseModel):
+    __root__: Union[Target8, Target9]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/all_of_ref.py 0.45.0-1/tests/data/expected/main/jsonschema/all_of_ref.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/all_of_ref.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/all_of_ref.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  test.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class First(BaseModel):
+    second: str = Field(..., description='Second', examples=['second'])
+
+
+class Test(First):
+    pass
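Editor's note: an allOf consisting of a single $ref and no extra constraints maps directly to inheritance, which is why Test is an empty subclass of First. A hypothetical schema shape that would yield code like the above (the actual fixture JSON is not shown here):

schema = {
    "title": "Test",
    "allOf": [{"$ref": "#/definitions/First"}],
    "definitions": {
        "First": {
            "type": "object",
            "properties": {"second": {"type": "string"}},
            "required": ["second"],
        }
    },
}
# expected mapping:
#   class First(BaseModel): second: str
#   class Test(First): pass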
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/all_of_ref_self.py 0.45.0-1/tests/data/expected/main/jsonschema/all_of_ref_self.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/all_of_ref_self.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/all_of_ref_self.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  all_of_ref_self.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Version(BaseModel):
+    __root__: None
+
+
+class Model(BaseModel):
+    version: Optional[Version] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/all_of_ref_with_property_override.py 0.45.0-1/tests/data/expected/main/jsonschema/all_of_ref_with_property_override.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/all_of_ref_with_property_override.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/all_of_ref_with_property_override.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  all_of_ref_with_property_override.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, constr
+
+
+class Thing(BaseModel):
+    type: str
+    name: constr(min_length=1)
+
+
+class Person(Thing):
+    type: Optional[str] = 'playground:Person'
+    name: Optional[constr(min_length=1)] = None
+    age: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/all_of_use_default.py 0.45.0-1/tests/data/expected/main/jsonschema/all_of_use_default.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/all_of_use_default.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/all_of_use_default.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  all_of_default.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class Item(BaseModel):
+    test: Optional[str] = 'test123'
+    testarray: Optional[List[str]] = Field(['test123'], min_items=1, title='test array')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/all_of_with_object.py 0.45.0-1/tests/data/expected/main/jsonschema/all_of_with_object.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/all_of_with_object.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/all_of_with_object.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,27 @@
+# generated by datamodel-codegen:
+#   filename:  all_of_with_object.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Home(BaseModel):
+    address: Optional[str] = None
+    zip: Optional[str] = None
+
+
+class Kind(BaseModel):
+    description: Optional[str] = None
+
+
+class Id(BaseModel):
+    id: Optional[int] = None
+
+
+class Pet(Home, Kind, Id):
+    name: Optional[str] = None
+    age: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/allof_root_model_constraints.py 0.45.0-1/tests/data/expected/main/jsonschema/allof_root_model_constraints.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/allof_root_model_constraints.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/allof_root_model_constraints.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,162 @@
+# generated by datamodel-codegen:
+#   filename:  allof_root_model_constraints.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Dict, List, Optional
+
+from pydantic import BaseModel, EmailStr, Field, conint, constr
+
+
+class StringDatatype(BaseModel):
+    __root__: constr(regex=r'^\S(.*\S)?$') = Field(
+        ..., description='A base string type.'
+    )
+
+
+class ConstrainedStringDatatype(BaseModel):
+    __root__: constr(regex=r'^[A-Z].*', min_length=1) = Field(
+        ..., description='A constrained string.'
+    )
+
+
+class IntegerDatatype(BaseModel):
+    __root__: int = Field(..., description='A whole number.')
+
+
+class NonNegativeIntegerDatatype(BaseModel):
+    __root__: conint(ge=0) = Field(..., description='Non-negative integer.')
+
+
+class BoundedIntegerDatatype(BaseModel):
+    __root__: conint(ge=0, le=100) = Field(
+        ..., description='Integer between 0 and 100.'
+    )
+
+
+class EmailDatatype(BaseModel):
+    __root__: EmailStr = Field(..., description='Email with format.')
+
+
+class FormattedStringDatatype(BaseModel):
+    __root__: EmailStr = Field(..., description='A string with email format.')
+
+
+class ObjectBase(BaseModel):
+    id: Optional[int] = None
+
+
+class ObjectWithAllOf(ObjectBase):
+    name: Optional[str] = None
+
+
+class MultiRefAllOf(BaseModel):
+    pass
+
+
+class NoConstraintAllOf(BaseModel):
+    pass
+
+
+class IncompatibleTypeAllOf(BaseModel):
+    pass
+
+
+class ConstraintWithProperties(BaseModel):
+    extra: Optional[str] = None
+
+
+class ConstraintWithItems(BaseModel):
+    pass
+
+
+class NumberIntegerCompatible(BaseModel):
+    __root__: conint(ge=0) = Field(
+        ..., description='Number and integer are compatible.'
+    )
+
+
+class RefWithSchemaKeywords(BaseModel):
+    __root__: constr(regex=r'^\S(.*\S)?$', min_length=5, max_length=100) = Field(
+        ..., description='Ref with additional schema keywords.'
+    )
+
+
+class ArrayDatatype(BaseModel):
+    __root__: List[str]
+
+
+class RefToArrayAllOf(BaseModel):
+    pass
+
+
+class ObjectNoPropsDatatype(BaseModel):
+    pass
+
+
+class RefToObjectNoPropsAllOf(ObjectNoPropsDatatype):
+    pass
+
+
+class PatternPropsDatatype(BaseModel):
+    __root__: Dict[constr(regex=r'^S_'), str]
+
+
+class RefToPatternPropsAllOf(BaseModel):
+    pass
+
+
+class NestedAllOfDatatype(BaseModel):
+    pass
+
+
+class RefToNestedAllOfAllOf(NestedAllOfDatatype):
+    pass
+
+
+class ConstraintsOnlyDatatype(BaseModel):
+    __root__: Any = Field(..., description='Constraints only, no type.')
+
+
+class RefToConstraintsOnlyAllOf(BaseModel):
+    __root__: Any = Field(..., description='Ref to constraints-only schema.')
+
+
+class NoDescriptionAllOf(BaseModel):
+    __root__: constr(regex=r'^\S(.*\S)?$', min_length=5) = Field(
+        ..., description='A base string type.'
+    )
+
+
+class EmptyConstraintItemAllOf(BaseModel):
+    __root__: constr(regex=r'^\S(.*\S)?$', max_length=50) = Field(
+        ..., description='AllOf with empty constraint item.'
+    )
+
+
+class ConflictingFormatAllOf(BaseModel):
+    pass
+
+
+class Model(BaseModel):
+    name: Optional[ConstrainedStringDatatype] = None
+    count: Optional[NonNegativeIntegerDatatype] = None
+    percentage: Optional[BoundedIntegerDatatype] = None
+    email: Optional[EmailDatatype] = None
+    obj: Optional[ObjectWithAllOf] = None
+    multi: Optional[MultiRefAllOf] = None
+    noconstraint: Optional[NoConstraintAllOf] = None
+    incompatible: Optional[IncompatibleTypeAllOf] = None
+    withprops: Optional[ConstraintWithProperties] = None
+    withitems: Optional[ConstraintWithItems] = None
+    numint: Optional[NumberIntegerCompatible] = None
+    refwithkw: Optional[RefWithSchemaKeywords] = None
+    refarr: Optional[RefToArrayAllOf] = None
+    refobjnoprops: Optional[RefToObjectNoPropsAllOf] = None
+    refpatternprops: Optional[RefToPatternPropsAllOf] = None
+    refnestedallof: Optional[RefToNestedAllOfAllOf] = None
+    refconstraintsonly: Optional[RefToConstraintsOnlyAllOf] = None
+    nodescription: Optional[NoDescriptionAllOf] = None
+    emptyconstraint: Optional[EmptyConstraintItemAllOf] = None
+    conflictingformat: Optional[ConflictingFormatAllOf] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/allof_root_model_constraints_merge.py 0.45.0-1/tests/data/expected/main/jsonschema/allof_root_model_constraints_merge.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/allof_root_model_constraints_merge.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/allof_root_model_constraints_merge.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,162 @@
+# generated by datamodel-codegen:
+#   filename:  allof_root_model_constraints.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Dict, List, Optional
+
+from pydantic import BaseModel, EmailStr, Field, conint, constr
+
+
+class StringDatatype(BaseModel):
+    __root__: constr(regex=r'^\S(.*\S)?$') = Field(
+        ..., description='A base string type.'
+    )
+
+
+class ConstrainedStringDatatype(BaseModel):
+    __root__: constr(regex=r'(?=^\S(.*\S)?$)(?=^[A-Z].*)', min_length=1) = Field(
+        ..., description='A constrained string.'
+    )
+
+
+class IntegerDatatype(BaseModel):
+    __root__: int = Field(..., description='A whole number.')
+
+
+class NonNegativeIntegerDatatype(BaseModel):
+    __root__: conint(ge=0) = Field(..., description='Non-negative integer.')
+
+
+class BoundedIntegerDatatype(BaseModel):
+    __root__: conint(ge=0, le=100) = Field(
+        ..., description='Integer between 0 and 100.'
+    )
+
+
+class EmailDatatype(BaseModel):
+    __root__: EmailStr = Field(..., description='Email with format.')
+
+
+class FormattedStringDatatype(BaseModel):
+    __root__: EmailStr = Field(..., description='A string with email format.')
+
+
+class ObjectBase(BaseModel):
+    id: Optional[int] = None
+
+
+class ObjectWithAllOf(ObjectBase):
+    name: Optional[str] = None
+
+
+class MultiRefAllOf(BaseModel):
+    pass
+
+
+class NoConstraintAllOf(BaseModel):
+    pass
+
+
+class IncompatibleTypeAllOf(BaseModel):
+    pass
+
+
+class ConstraintWithProperties(BaseModel):
+    extra: Optional[str] = None
+
+
+class ConstraintWithItems(BaseModel):
+    pass
+
+
+class NumberIntegerCompatible(BaseModel):
+    __root__: conint(ge=0) = Field(
+        ..., description='Number and integer are compatible.'
+    )
+
+
+class RefWithSchemaKeywords(BaseModel):
+    __root__: constr(regex=r'^\S(.*\S)?$', min_length=5, max_length=100) = Field(
+        ..., description='Ref with additional schema keywords.'
+    )
+
+
+class ArrayDatatype(BaseModel):
+    __root__: List[str]
+
+
+class RefToArrayAllOf(BaseModel):
+    pass
+
+
+class ObjectNoPropsDatatype(BaseModel):
+    pass
+
+
+class RefToObjectNoPropsAllOf(ObjectNoPropsDatatype):
+    pass
+
+
+class PatternPropsDatatype(BaseModel):
+    __root__: Dict[constr(regex=r'^S_'), str]
+
+
+class RefToPatternPropsAllOf(BaseModel):
+    pass
+
+
+class NestedAllOfDatatype(BaseModel):
+    pass
+
+
+class RefToNestedAllOfAllOf(NestedAllOfDatatype):
+    pass
+
+
+class ConstraintsOnlyDatatype(BaseModel):
+    __root__: Any = Field(..., description='Constraints only, no type.')
+
+
+class RefToConstraintsOnlyAllOf(BaseModel):
+    __root__: Any = Field(..., description='Ref to constraints-only schema.')
+
+
+class NoDescriptionAllOf(BaseModel):
+    __root__: constr(regex=r'^\S(.*\S)?$', min_length=5) = Field(
+        ..., description='A base string type.'
+    )
+
+
+class EmptyConstraintItemAllOf(BaseModel):
+    __root__: constr(regex=r'^\S(.*\S)?$', max_length=50) = Field(
+        ..., description='AllOf with empty constraint item.'
+    )
+
+
+class ConflictingFormatAllOf(BaseModel):
+    pass
+
+
+class Model(BaseModel):
+    name: Optional[ConstrainedStringDatatype] = None
+    count: Optional[NonNegativeIntegerDatatype] = None
+    percentage: Optional[BoundedIntegerDatatype] = None
+    email: Optional[EmailDatatype] = None
+    obj: Optional[ObjectWithAllOf] = None
+    multi: Optional[MultiRefAllOf] = None
+    noconstraint: Optional[NoConstraintAllOf] = None
+    incompatible: Optional[IncompatibleTypeAllOf] = None
+    withprops: Optional[ConstraintWithProperties] = None
+    withitems: Optional[ConstraintWithItems] = None
+    numint: Optional[NumberIntegerCompatible] = None
+    refwithkw: Optional[RefWithSchemaKeywords] = None
+    refarr: Optional[RefToArrayAllOf] = None
+    refobjnoprops: Optional[RefToObjectNoPropsAllOf] = None
+    refpatternprops: Optional[RefToPatternPropsAllOf] = None
+    refnestedallof: Optional[RefToNestedAllOfAllOf] = None
+    refconstraintsonly: Optional[RefToConstraintsOnlyAllOf] = None
+    nodescription: Optional[NoDescriptionAllOf] = None
+    emptyconstraint: Optional[EmptyConstraintItemAllOf] = None
+    conflictingformat: Optional[ConflictingFormatAllOf] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/anyof_const_enum_nested.py 0.45.0-1/tests/data/expected/main/jsonschema/anyof_const_enum_nested.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/anyof_const_enum_nested.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/anyof_const_enum_nested.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  anyof_const_enum_nested.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class Mode(Enum):
+    fast = 'fast'
+    slow = 'slow'
+
+
+class Mode1(Enum):
+    a = 'a'
+    b = 'b'
+
+
+class Config(BaseModel):
+    mode: Optional[Mode] = Field(None, title='Mode')
+    modes: Optional[List[Mode1]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/anyof_const_enum_nested_literal.py 0.45.0-1/tests/data/expected/main/jsonschema/anyof_const_enum_nested_literal.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/anyof_const_enum_nested_literal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/anyof_const_enum_nested_literal.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  anyof_const_enum_nested.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Literal, Optional
+
+from pydantic import BaseModel, Field
+
+
+class Config(BaseModel):
+    mode: Optional[Literal['fast', 'slow']] = Field(None, title='Mode')
+    modes: Optional[List[Literal['a', 'b']]] = None
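Editor's note: with enum fields rendered as literals, the two anonymous Enum classes from the previous fixture collapse into inline Literal types, trading named members for a flatter module; the validation behaviour is equivalent. A trimmed sketch:

from typing import Literal, Optional

from pydantic import BaseModel


class Config(BaseModel):
    mode: Optional[Literal['fast', 'slow']] = None


print(Config(mode='fast'))  # ok
# Config(mode='medium') would raise a ValidationError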
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/array_field_constraints.py 0.45.0-1/tests/data/expected/main/jsonschema/array_field_constraints.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/array_field_constraints.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/array_field_constraints.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,22 @@
+# generated by datamodel-codegen:
+#   filename:  array_field_constraints.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List
+
+from pydantic import BaseModel, Field
+
+
+class Number(BaseModel):
+    __root__: str = Field(
+        ...,
+        description='Just a number',
+        examples=['1', '5464446', '684572369854259'],
+        regex='^\\d{1,15}$',
+    )
+
+
+class TestSchema(BaseModel):
+    numbers: List[Number] = Field(..., description='A list of numbers')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/array_in_additional_properties.py 0.45.0-1/tests/data/expected/main/jsonschema/array_in_additional_properties.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/array_in_additional_properties.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/array_in_additional_properties.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  array_in_additional_properties.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Dict, List
+
+from pydantic import BaseModel
+
+
+class MyJsonOfListOfString(BaseModel):
+    __root__: Dict[str, List[str]]
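Editor's note: schemas that are a bare mapping at the top level become pydantic v1 custom root models; construction and access both go through __root__. A short usage sketch (pydantic v1):

from typing import Dict, List

from pydantic import BaseModel


class MyJsonOfListOfString(BaseModel):
    __root__: Dict[str, List[str]]


m = MyJsonOfListOfString.parse_obj({'a': ['x', 'y']})
print(m.__root__['a'])  # ['x', 'y']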
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/autodetect.py 0.45.0-1/tests/data/expected/main/jsonschema/autodetect.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/autodetect.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/autodetect.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  person.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, List, Optional
+
+from pydantic import BaseModel, Field, conint
+
+
+class Person(BaseModel):
+    firstName: Optional[str] = Field(None, description="The person's first name.")
+    lastName: Optional[str] = Field(None, description="The person's last name.")
+    age: Optional[conint(ge=0)] = Field(
+        None, description='Age in years which must be equal to or greater than zero.'
+    )
+    friends: Optional[List[Any]] = None
+    comment: None = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/boolean_property.py 0.45.0-1/tests/data/expected/main/jsonschema/boolean_property.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/boolean_property.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/boolean_property.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  boolean_property.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Optional
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    field: Optional[Any] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/bundled_schema_with_id.py 0.45.0-1/tests/data/expected/main/jsonschema/bundled_schema_with_id.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/bundled_schema_with_id.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/bundled_schema_with_id.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,31 @@
+# generated by datamodel-codegen:
+#   filename:  bundled_schema_with_id.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Species(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    bird = 'bird'
+
+
+class Pet(BaseModel):
+    name: str
+    species: Species
+
+
+class User(BaseModel):
+    name: str
+    pet: Optional[Pet] = None
+
+
+class BundledSchema(BaseModel):
+    user: Optional[User] = None
+    pet: Optional[Pet] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/circular_reference.py 0.45.0-1/tests/data/expected/main/jsonschema/circular_reference.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/circular_reference.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/circular_reference.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,35 @@
+# generated by datamodel-codegen:
+#   filename:  circular_reference.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class Member(BaseModel):
+    __root__: User = Field(..., title='Member')
+
+
+class User(BaseModel):
+    name: Optional[str] = None
+    pet: Optional[Animal] = None
+    home: Optional[House] = None
+
+
+class Animal(BaseModel):
+    name: Optional[str] = None
+    breeder: Optional[User] = None
+    home: Optional[House] = None
+
+
+class House(BaseModel):
+    address: Optional[str] = None
+    owner: Optional[User] = None
+
+
+Member.update_forward_refs()
+User.update_forward_refs()
+Animal.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/combine_any_of_object.py 0.45.0-1/tests/data/expected/main/jsonschema/combine_any_of_object.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/combine_any_of_object.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/combine_any_of_object.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,52 @@
+# generated by datamodel-codegen:
+#   filename:  combine_any_of_object.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Union
+
+from pydantic import BaseModel, Extra, Field
+
+
+class MySchema1(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    AddressLine1: str
+    AddressLine2: Optional[str] = None
+    City: Optional[str] = None
+    State: Optional[str] = None
+    ZipCode: str
+
+
+class MySchema2(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    AddressLine1: str
+    AddressLine2: Optional[str] = None
+    City: Optional[str] = None
+    County: Optional[str] = None
+    PostCode: str
+
+
+class US(BaseModel):
+    County: Optional[str] = None
+    PostCode: str
+
+
+class MySchema3(US):
+    class Config:
+        extra = Extra.allow
+
+    AddressLine1: str
+    AddressLine2: Optional[str] = None
+    City: Optional[str] = None
+
+
+class MySchema(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    __root__: Union[MySchema1, MySchema2, MySchema3] = Field(..., title='My schema')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/combine_any_of_object_left_to_right.py 0.45.0-1/tests/data/expected/main/jsonschema/combine_any_of_object_left_to_right.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/combine_any_of_object_left_to_right.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/combine_any_of_object_left_to_right.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,51 @@
+# generated by datamodel-codegen:
+#   filename:  combine_any_of_object.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Union
+
+from pydantic import BaseModel, ConfigDict, Field, RootModel
+
+
+class MySchema1(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    AddressLine1: str
+    AddressLine2: Optional[str] = None
+    City: Optional[str] = None
+    State: Optional[str] = None
+    ZipCode: str
+
+
+class MySchema2(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    AddressLine1: str
+    AddressLine2: Optional[str] = None
+    City: Optional[str] = None
+    County: Optional[str] = None
+    PostCode: str
+
+
+class US(BaseModel):
+    County: Optional[str] = None
+    PostCode: str
+
+
+class MySchema3(US):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    AddressLine1: str
+    AddressLine2: Optional[str] = None
+    City: Optional[str] = None
+
+
+class MySchema(RootModel[Union[MySchema1, MySchema2, MySchema3]]):
+    root: Union[MySchema1, MySchema2, MySchema3] = Field(
+        ..., title='My schema', union_mode='left_to_right'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/combine_any_of_object_v2.py 0.45.0-1/tests/data/expected/main/jsonschema/combine_any_of_object_v2.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/combine_any_of_object_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/combine_any_of_object_v2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,49 @@
+# generated by datamodel-codegen:
+#   filename:  combine_any_of_object.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Union
+
+from pydantic import BaseModel, ConfigDict, Field, RootModel
+
+
+class MySchema1(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    AddressLine1: str
+    AddressLine2: Optional[str] = None
+    City: Optional[str] = None
+    State: Optional[str] = None
+    ZipCode: str
+
+
+class MySchema2(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    AddressLine1: str
+    AddressLine2: Optional[str] = None
+    City: Optional[str] = None
+    County: Optional[str] = None
+    PostCode: str
+
+
+class US(BaseModel):
+    County: Optional[str] = None
+    PostCode: str
+
+
+class MySchema3(US):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    AddressLine1: str
+    AddressLine2: Optional[str] = None
+    City: Optional[str] = None
+
+
+class MySchema(RootModel[Union[MySchema1, MySchema2, MySchema3]]):
+    root: Union[MySchema1, MySchema2, MySchema3] = Field(..., title='My schema')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/combine_one_of_object.py 0.45.0-1/tests/data/expected/main/jsonschema/combine_one_of_object.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/combine_one_of_object.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/combine_one_of_object.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,52 @@
+# generated by datamodel-codegen:
+#   filename:  combine_one_of_object.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Union
+
+from pydantic import BaseModel, Extra, Field
+
+
+class MySchema1(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    AddressLine1: str
+    AddressLine2: Optional[str] = None
+    City: Optional[str] = None
+    State: Optional[str] = None
+    ZipCode: str
+
+
+class MySchema2(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    AddressLine1: str
+    AddressLine2: Optional[str] = None
+    City: Optional[str] = None
+    County: Optional[str] = None
+    PostCode: str
+
+
+class US(BaseModel):
+    County: Optional[str] = None
+    PostCode: str
+
+
+class MySchema3(US):
+    class Config:
+        extra = Extra.allow
+
+    AddressLine1: str
+    AddressLine2: Optional[str] = None
+    City: Optional[str] = None
+
+
+class MySchema(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    __root__: Union[MySchema1, MySchema2, MySchema3] = Field(..., title='My schema')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/combined_array.py 0.45.0-1/tests/data/expected/main/jsonschema/combined_array.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/combined_array.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/combined_array.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,116 @@
+# generated by datamodel-codegen:
+#   filename:  combined_array.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Pet1(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
+
+
+class Pet(BaseModel):
+    __root__: Union[List[Pet1], Pet1] = Field(..., title='Pet')
+
+
+class CombinedEnum1(Enum):
+    green = 'green'
+    red = 'red'
+
+
+class CombinedEnumField(Enum):
+    green = 'green'
+    red = 'red'
+
+
+class CombinedObjectField1(BaseModel):
+    color: Optional[str] = None
+
+
+class CombinedSelf1(BaseModel):
+    color: Optional[str] = None
+
+
+class CombinedSelf(BaseModel):
+    __root__: Union[List[CombinedSelf1], CombinedSelf1]
+
+
+class CombinedSelfEnum1(BaseModel):
+    color: Optional[str] = None
+
+
+class CombinedSelfEnum2(Enum):
+    green = 'green'
+    red = 'red'
+
+
+class CombinedSelfEnum(BaseModel):
+    __root__: Union[
+        List[Union[CombinedSelfEnum1, CombinedSelfEnum2]],
+        CombinedSelfEnum1,
+        CombinedSelfEnum2,
+    ]
+
+
+class CombinedSelfAllOf2(Enum):
+    green = 'green'
+    red = 'red'
+
+
+class Kind(BaseModel):
+    description: Optional[str] = None
+
+
+class Id(BaseModel):
+    id: Optional[int] = None
+
+
+class CustomRootModel(BaseModel):
+    __root__: str
+
+
+class CombinedEnum(BaseModel):
+    __root__: Union[List[Kind], CombinedEnum1]
+
+
+class CombinedAllOf1(Kind, Id):
+    pass
+
+
+class CombinedAllOf(BaseModel):
+    __root__: Union[List[Kind], CombinedAllOf1]
+
+
+class CombinedAllOfField(Kind, Id):
+    pass
+
+
+class CombinedAllOfObjectField(Kind, Id):
+    color: Optional[str] = None
+
+
+class CombinedObjectField(BaseModel):
+    CombinedEnumField: Optional[Union[List[Kind], CombinedEnumField]] = None
+    CombinedAllOfField: Optional[Union[List[Kind], CombinedAllOfField]] = None
+    CombinedObjectField: Optional[Union[List[Kind], CombinedObjectField1]] = None
+    CombinedAllOfObjectField: Optional[
+        Union[List[Kind], CombinedAllOfObjectField]
+    ] = None
+
+
+class CombinedSelfAllOf1(Kind, Id):
+    color: Optional[str] = None
+
+
+class CombinedSelfAllOf(BaseModel):
+    __root__: Union[
+        List[Union[CombinedSelfAllOf1, CombinedSelfAllOf2]],
+        CombinedSelfAllOf1,
+        CombinedSelfAllOf2,
+    ]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/complex_any_of.py 0.45.0-1/tests/data/expected/main/jsonschema/complex_any_of.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/complex_any_of.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/complex_any_of.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,29 @@
+# generated by datamodel-codegen:
+#   filename:  complex_any_of.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Union
+
+from pydantic import BaseModel, Extra
+
+
+class Key(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    address: str
+    nat: str
+
+
+class ModelItem(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    key: Key
+    value: str
+
+
+class Model(BaseModel):
+    __root__: Union[int, List[ModelItem]]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/complex_one_of.py 0.45.0-1/tests/data/expected/main/jsonschema/complex_one_of.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/complex_one_of.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/complex_one_of.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,29 @@
+# generated by datamodel-codegen:
+#   filename:  complex_one_of.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Union
+
+from pydantic import BaseModel, Extra
+
+
+class Key(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    address: str
+    nat: str
+
+
+class ModelItem(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    key: Key
+    value: str
+
+
+class Model(BaseModel):
+    __root__: Union[int, List[ModelItem]]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/complicated_enum_default_member.py 0.45.0-1/tests/data/expected/main/jsonschema/complicated_enum_default_member.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/complicated_enum_default_member.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/complicated_enum_default_member.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,27 @@
+# generated by datamodel-codegen:
+#   filename:  complicated_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class ProcessingStatus(Enum):
+    COMPLETED = 'COMPLETED'
+    PENDING = 'PENDING'
+    FAILED = 'FAILED'
+
+
+class Kind(BaseModel):
+    __root__: str
+
+
+class ProcessingTask(BaseModel):
+    processing_status_union: Optional[ProcessingStatus] = ProcessingStatus.COMPLETED
+    processing_status: Optional[ProcessingStatus] = ProcessingStatus.COMPLETED
+    name: Optional[str] = None
+    kind: Optional[Kind] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/complicated_enum_default_member_dataclass.py 0.45.0-1/tests/data/expected/main/jsonschema/complicated_enum_default_member_dataclass.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/complicated_enum_default_member_dataclass.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/complicated_enum_default_member_dataclass.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,28 @@
+# generated by datamodel-codegen:
+#   filename:  complicated_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from enum import Enum
+from typing import Optional
+
+from typing_extensions import TypeAlias
+
+
+class ProcessingStatus(Enum):
+    COMPLETED = 'COMPLETED'
+    PENDING = 'PENDING'
+    FAILED = 'FAILED'
+
+
+Kind: TypeAlias = str
+
+
+@dataclass
+class ProcessingTask:
+    processing_status_union: Optional[ProcessingStatus] = ProcessingStatus.COMPLETED
+    processing_status: Optional[ProcessingStatus] = ProcessingStatus.COMPLETED
+    name: Optional[str] = None
+    kind: Optional[Kind] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/custom_base_path.py 0.45.0-1/tests/data/expected/main/jsonschema/custom_base_path.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/custom_base_path.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/custom_base_path.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,34 @@
+# generated by datamodel-codegen:
+#   filename:  custom_base_path.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from custom.models import Animal, Person, Property
+
+
+class Spouse(Person):
+    job: Optional[str] = None
+
+
+class Pet(Animal):
+    name: Optional[str] = None
+
+
+class Child(Person):
+    school: Optional[str] = None
+    grade: Optional[float] = None
+    pets: Optional[List[Pet]] = None
+
+
+class Owner(Person):
+    job: Optional[str] = None
+    spouse: Optional[Spouse] = None
+    children: Optional[List[Child]] = None
+
+
+class House(Property):
+    address: str
+    owner: Optional[Owner] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/custom_formatters.py 0.45.0-1/tests/data/expected/main/jsonschema/custom_formatters.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/custom_formatters.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/custom_formatters.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,23 @@
+# generated by datamodel-codegen:
+#   filename:  person.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+# MIT License
+#
+# Copyright (c) 2023 Blah-blah
+#
+from __future__ import annotations
+
+from typing import Any, List, Optional
+
+from pydantic import BaseModel, Field, conint
+
+
+class Person(BaseModel):
+    firstName: Optional[str] = Field(None, description="The person's first name.")
+    lastName: Optional[str] = Field(None, description="The person's last name.")
+    age: Optional[conint(ge=0)] = Field(
+        None, description='Age in years which must be equal to or greater than zero.'
+    )
+    friends: Optional[List[Any]] = None
+    comment: None = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/custom_type_path.py 0.45.0-1/tests/data/expected/main/jsonschema/custom_type_path.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/custom_type_path.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/custom_type_path.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,36 @@
+# generated by datamodel-codegen:
+#   filename:  custom_type_path.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+from custom import MultipleLineString, SpecialString, TitleString
+from custom.collection.array import Friends
+from custom.special import UpperString
+from custom.special.numbers import Age
+
+
+class Person(BaseModel):
+    class Config:
+        arbitrary_types_allowed = True
+
+    firstName: Optional[TitleString] = Field(
+        None, description="The person's first name."
+    )
+    lastName: Optional[UpperString] = Field(None, description="The person's last name.")
+    age: Optional[Age] = Field(
+        None, description='Age in years which must be equal to or greater than zero.'
+    )
+    friends: Optional[Friends] = None
+    comment: Optional[MultipleLineString] = None
+
+
+class RootedCustomType(BaseModel):
+    class Config:
+        arbitrary_types_allowed = True
+
+    __root__: SpecialString
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/custom_type_path_pydantic_v2.py 0.45.0-1/tests/data/expected/main/jsonschema/custom_type_path_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/custom_type_path_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/custom_type_path_pydantic_v2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,36 @@
+# generated by datamodel-codegen:
+#   filename:  custom_type_path.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict, Field, RootModel
+
+from custom import MultipleLineString, SpecialString, TitleString
+from custom.collection.array import Friends
+from custom.special import UpperString
+from custom.special.numbers import Age
+
+
+class Person(BaseModel):
+    model_config = ConfigDict(
+        arbitrary_types_allowed=True,
+    )
+    firstName: Optional[TitleString] = Field(
+        None, description="The person's first name."
+    )
+    lastName: Optional[UpperString] = Field(None, description="The person's last name.")
+    age: Optional[Age] = Field(
+        None, description='Age in years which must be equal to or greater than zero.'
+    )
+    friends: Optional[Friends] = None
+    comment: Optional[MultipleLineString] = None
+
+
+class RootedCustomType(RootModel[SpecialString]):
+    model_config = ConfigDict(
+        arbitrary_types_allowed=True,
+    )
+    root: SpecialString
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/dataclass_const.py 0.45.0-1/tests/data/expected/main/jsonschema/dataclass_const.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/dataclass_const.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/dataclass_const.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  const.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Literal
+
+
+@dataclass
+class Const:
+    foo: Literal['foo']
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/dataclass_field.py 0.45.0-1/tests/data/expected/main/jsonschema/dataclass_field.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/dataclass_field.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/dataclass_field.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,23 @@
+# generated by datamodel-codegen:
+#   filename:  user.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from typing import Any, List, Optional
+
+from typing_extensions import TypeAlias
+
+Model: TypeAlias = Any
+
+
+@dataclass
+class User:
+    name: Optional[str] = None
+    pets: List[User] = field(default_factory=list)
+
+
+@dataclass
+class Pet:
+    name: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/dataclass_field_default.py 0.45.0-1/tests/data/expected/main/jsonschema/dataclass_field_default.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/dataclass_field_default.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/dataclass_field_default.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,23 @@
+# generated by datamodel-codegen:
+#   filename:  user_default.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from typing import Any, List, Optional
+
+from typing_extensions import TypeAlias
+
+Model: TypeAlias = Any
+
+
+@dataclass
+class User:
+    name: Optional[str] = None
+    pets: Optional[List[User]] = field(default_factory=lambda: ['dog', 'cat'])
+
+
+@dataclass
+class Pet:
+    name: Optional[str] = 'dog'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/dataclass_field_py312.py 0.45.0-1/tests/data/expected/main/jsonschema/dataclass_field_py312.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/dataclass_field_py312.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/dataclass_field_py312.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  user.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from typing import Any, List, Optional
+
+type Model = Any
+
+
+@dataclass
+class User:
+    name: Optional[str] = None
+    pets: List[User] = field(default_factory=list)
+
+
+@dataclass
+class Pet:
+    name: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_literals.py 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_literals.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_literals.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_literals.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,28 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_literals.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Type1(BaseModel):
+    type_: Literal['a'] = Field('a', title='Type ')
+
+
+class Type2(BaseModel):
+    type_: Literal['b'] = Field('b', title='Type ')
+
+
+class UnrelatedType(BaseModel):
+    info: Optional[str] = Field(
+        'Unrelated type, not involved in the discriminated union',
+        title='A way to check for side effects',
+    )
+
+
+class Response(BaseModel):
+    inner: Union[Type1, Type2] = Field(..., discriminator='type_', title='Inner')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_literals_msgspec.py 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_literals_msgspec.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_literals_msgspec.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_literals_msgspec.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,27 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_literals.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, Union
+
+from msgspec import Meta, Struct, UnsetType
+
+
+class Type1(Struct, tag_field='type_', tag='a'):
+    pass
+
+
+class Type2(Struct, tag_field='type_', tag='b'):
+    pass
+
+
+class UnrelatedType(Struct):
+    info: Union[
+        Annotated[str, Meta(title='A way to check for side effects')], UnsetType
+    ] = 'Unrelated type, not involved in the discriminated union'
+
+
+class Response(Struct):
+    inner: Annotated[Union[Type1, Type2], Meta(title='Inner')]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_literals_msgspec_keyword_only.py 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_literals_msgspec_keyword_only.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_literals_msgspec_keyword_only.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_literals_msgspec_keyword_only.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,27 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_literals.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, Union
+
+from msgspec import Meta, Struct, UnsetType
+
+
+class Type1(Struct, kw_only=True, tag_field='type_', tag='a'):
+    pass
+
+
+class Type2(Struct, kw_only=True, tag_field='type_', tag='b'):
+    pass
+
+
+class UnrelatedType(Struct, kw_only=True):
+    info: Union[
+        Annotated[str, Meta(title='A way to check for side effects')], UnsetType
+    ] = 'Unrelated type, not involved in the discriminated union'
+
+
+class Response(Struct, kw_only=True):
+    inner: Annotated[Union[Type1, Type2], Meta(title='Inner')]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_literals_msgspec_keyword_only_omit_defaults.py 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_literals_msgspec_keyword_only_omit_defaults.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_literals_msgspec_keyword_only_omit_defaults.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_literals_msgspec_keyword_only_omit_defaults.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,27 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_literals.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, Union
+
+from msgspec import Meta, Struct, UnsetType
+
+
+class Type1(Struct, omit_defaults=True, kw_only=True, tag_field='type_', tag='a'):
+    pass
+
+
+class Type2(Struct, omit_defaults=True, kw_only=True, tag_field='type_', tag='b'):
+    pass
+
+
+class UnrelatedType(Struct, omit_defaults=True, kw_only=True):
+    info: Union[
+        Annotated[str, Meta(title='A way to check for side effects')], UnsetType
+    ] = 'Unrelated type, not involved in the discriminated union'
+
+
+class Response(Struct, omit_defaults=True, kw_only=True):
+    inner: Annotated[Union[Type1, Type2], Meta(title='Inner')]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_no_mapping.py 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_no_mapping.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_no_mapping.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_no_mapping.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_no_mapping.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, Union
+
+from pydantic import BaseModel, Field
+
+
+class Cat(BaseModel):
+    pet_type: Literal['cat']
+
+
+class Dog(BaseModel):
+    pet_type: Literal['dog']
+
+
+class Animal(BaseModel):
+    pet: Union[Cat, Dog] = Field(..., discriminator='pet_type', title='Pet')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_reference.py 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_reference.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_reference.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_reference.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,36 @@
+# generated by datamodel-codegen:
+#   filename:  schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Type1(BaseModel):
+    type_: Literal['a'] = Field('a', title='Type ')
+
+
+class Type2(BaseModel):
+    type_: Literal['b'] = Field('b', title='Type ')
+    ref_type: Optional[Type1] = Field(None, description='A referenced type.')
+
+
+class Type4(BaseModel):
+    type_: Literal['d'] = Field('d', title='Type ')
+
+
+class Type5(BaseModel):
+    type_: Literal['e'] = Field('e', title='Type ')
+
+
+class Type3(BaseModel):
+    type_: Literal['c'] = Field('c', title='Type ')
+
+
+class Response(BaseModel):
+    inner: Union[Type1, Type2, Type3, Type4, Type5] = Field(
+        ..., discriminator='type_', title='Inner'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_reference_msgspec.py 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_reference_msgspec.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_reference_msgspec.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_reference_msgspec.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,35 @@
+# generated by datamodel-codegen:
+#   filename:  schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, Union
+
+from msgspec import UNSET, Meta, Struct, UnsetType
+
+
+class Type1(Struct, tag_field='type_', tag='a'):
+    pass
+
+
+class Type2(Struct, tag_field='type_', tag='b'):
+    ref_type: Union[
+        Annotated[Type1, Meta(description='A referenced type.')], UnsetType
+    ] = UNSET
+
+
+class Type4(Struct, tag_field='type_', tag='d'):
+    pass
+
+
+class Type5(Struct, tag_field='type_', tag='e'):
+    pass
+
+
+class Type3(Struct, tag_field='type_', tag='c'):
+    pass
+
+
+class Response(Struct):
+    inner: Annotated[Union[Type1, Type2, Type3, Type4, Type5], Meta(title='Inner')]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_with_external_reference
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_with_external_reference
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/artificial_folder/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/artificial_folder/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/artificial_folder/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/artificial_folder/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_with_external_reference
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/artificial_folder/type_1.py 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/artificial_folder/type_1.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/artificial_folder/type_1.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/artificial_folder/type_1.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  inner_folder/artificial_folder/type-1.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal
+
+from pydantic import BaseModel, Field
+
+
+class Type1(BaseModel):
+    type_: Literal['a'] = Field(..., const=True, title='Type ')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/schema.py 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/schema.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/schema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/schema.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+# generated by datamodel-codegen:
+#   filename:  inner_folder/schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, Union
+
+from pydantic import BaseModel, Field
+
+from .. import type_4
+from ..subfolder import type_5
+from . import type_2
+from .artificial_folder import type_1
+
+
+class Type3(BaseModel):
+    type_: Literal['c'] = Field(..., const=True, title='Type ')
+
+
+class Response(BaseModel):
+    inner: Union[type_1.Type1, type_2.Type2, Type3, type_4.Type4, type_5.Type5] = Field(
+        ..., discriminator='type_', title='Inner'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/type_2.py 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/type_2.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/type_2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/inner_folder/type_2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  inner_folder/type-2.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, Optional
+
+from pydantic import BaseModel, Field
+
+from .artificial_folder import type_1
+
+
+class Type2(BaseModel):
+    type_: Literal['b'] = Field(..., const=True, title='Type ')
+    ref_type: Optional[type_1.Type1] = Field(None, description='A referenced type.')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/subfolder/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/subfolder/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/subfolder/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/subfolder/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_with_external_reference
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/subfolder/type_5.py 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/subfolder/type_5.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/subfolder/type_5.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/subfolder/type_5.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  subfolder/type-5.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal
+
+from pydantic import BaseModel, Field
+
+
+class Type5(BaseModel):
+    type_: Literal['e'] = Field(..., const=True, title='Type ')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/type_4.py 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/type_4.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/type_4.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder/type_4.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  type-4.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal
+
+from pydantic import BaseModel, Field
+
+
+class Type4(BaseModel):
+    type_: Literal['d'] = Field(..., const=True, title='Type ')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_with_external_reference
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_with_external_reference
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/artificial_folder/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/artificial_folder/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/artificial_folder/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/artificial_folder/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_with_external_reference
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/artificial_folder/type_1.py 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/artificial_folder/type_1.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/artificial_folder/type_1.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/artificial_folder/type_1.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  inner_folder/artificial_folder/type-1.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from msgspec import Struct
+
+
+class Type1(Struct, tag_field='type_', tag='a'):
+    pass
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/schema.py 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/schema.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/schema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/schema.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  inner_folder/schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, Union
+
+from msgspec import Meta, Struct
+
+from .. import type_4
+from ..subfolder import type_5
+from . import type_2
+from .artificial_folder import type_1
+
+
+class Type3(Struct, tag_field='type_', tag='c'):
+    pass
+
+
+class Response(Struct):
+    inner: Annotated[
+        Union[type_1.Type1, type_2.Type2, Type3, type_4.Type4, type_5.Type5],
+        Meta(title='Inner'),
+    ]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/type_2.py 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/type_2.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/type_2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/inner_folder/type_2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  inner_folder/type-2.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, Union
+
+from msgspec import UNSET, Meta, Struct, UnsetType
+
+from .artificial_folder import type_1
+
+
+class Type2(Struct, tag_field='type_', tag='b'):
+    ref_type: Union[
+        Annotated[type_1.Type1, Meta(description='A referenced type.')], UnsetType
+    ] = UNSET
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/subfolder/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/subfolder/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/subfolder/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/subfolder/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_with_external_reference
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/subfolder/type_5.py 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/subfolder/type_5.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/subfolder/type_5.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/subfolder/type_5.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  subfolder/type-5.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from msgspec import Struct
+
+
+class Type5(Struct, tag_field='type_', tag='e'):
+    pass
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/type_4.py 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/type_4.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/type_4.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_external_references_folder_msgspec/type_4.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  type-4.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from msgspec import Struct
+
+
+class Type4(Struct, tag_field='type_', tag='d'):
+    pass
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_meta_msgspec.py 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_meta_msgspec.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_meta_msgspec.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_meta_msgspec.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_with_meta_msgspec.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Union
+
+from msgspec import UNSET, Struct, UnsetType
+
+
+class SystemMessage(Struct, tag_field='role', tag='system'):
+    content: str
+
+
+class UserMessage(Struct, tag_field='role', tag='user'):
+    content: str
+
+
+class Model(Struct):
+    message: Union[SystemMessage, UserMessage, UnsetType] = UNSET
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_type_string_msgspec.py 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_type_string_msgspec.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_type_string_msgspec.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_type_string_msgspec.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_with_type_string.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Union
+
+from msgspec import UNSET, Struct, UnsetType
+
+
+class SystemMessage(Struct, tag_field='role', tag='system'):
+    content: str
+
+
+class UserMessage(Struct, tag_field='role', tag='user'):
+    content: str
+
+
+class Model(Struct):
+    message: Union[SystemMessage, UserMessage, UnsetType] = UNSET
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_type_string_msgspec_no_annotated.py 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_type_string_msgspec_no_annotated.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/discriminator_with_type_string_msgspec_no_annotated.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/discriminator_with_type_string_msgspec_no_annotated.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_with_type_string.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Union
+
+from msgspec import UNSET, Struct, UnsetType
+
+
+class SystemMessage(Struct, tag_field='role', tag='system'):
+    content: str
+
+
+class UserMessage(Struct, tag_field='role', tag='user'):
+    content: str
+
+
+class Model(Struct):
+    message: Union[SystemMessage, UserMessage, UnsetType] = UNSET
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_field_constraints/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/duplicate_field_constraints/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_field_constraints/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/duplicate_field_constraints/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  duplicate_field_constraints
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_field_constraints/common.py 0.45.0-1/tests/data/expected/main/jsonschema/duplicate_field_constraints/common.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_field_constraints/common.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/duplicate_field_constraints/common.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  common.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any
+
+from pydantic import RootModel
+
+
+class Model(RootModel[Any]):
+    root: Any
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_field_constraints/test.py 0.45.0-1/tests/data/expected/main/jsonschema/duplicate_field_constraints/test.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_field_constraints/test.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/duplicate_field_constraints/test.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  test.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field, constr
+
+
+class Test(BaseModel):
+    uid: constr(pattern=r'[0-9ABCDEFGHJKMNPQRSTVWXYZ]{26,26}') = Field(
+        ..., description='ulid of this object'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  duplicate_field_constraints
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec/common.py 0.45.0-1/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec/common.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec/common.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec/common.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  common.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, Any
+
+from msgspec import Meta
+from typing_extensions import TypeAlias
+
+Model: TypeAlias = Any
+
+
+Ulid: TypeAlias = Annotated[str, Meta(pattern='[0-9ABCDEFGHJKMNPQRSTVWXYZ]{26,26}')]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec/test.py 0.45.0-1/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec/test.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec/test.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec/test.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  test.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated
+
+from msgspec import Meta, Struct
+
+from . import common
+
+
+class Test(Struct):
+    uid: Annotated[common.Ulid, Meta(description='ulid of this object')]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec_py38_collapse_root_models/common.py 0.45.0-1/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec_py38_collapse_root_models/common.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec_py38_collapse_root_models/common.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec_py38_collapse_root_models/common.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,9 @@
+# generated by datamodel-codegen:
+#   filename:  common.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any
+
+Model = Any
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec_py38_collapse_root_models/test.py 0.45.0-1/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec_py38_collapse_root_models/test.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec_py38_collapse_root_models/test.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/duplicate_field_constraints_msgspec_py38_collapse_root_models/test.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  test.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated
+
+from msgspec import Meta, Struct
+
+
+class Test(Struct):
+    uid: Annotated[
+        str,
+        Meta(
+            description='ulid of this object',
+            pattern='[0-9ABCDEFGHJKMNPQRSTVWXYZ]{26,26}',
+        ),
+    ]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_name/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/duplicate_name/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_name/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/duplicate_name/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  duplicate_name
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_name/bar.py 0.45.0-1/tests/data/expected/main/jsonschema/duplicate_name/bar.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_name/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/duplicate_name/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  bar.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class Bar(BaseModel):
+    pass
+
+
+class LogLevels(BaseModel):
+    __root__: str = Field(..., description='Supported logging levels')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_name/foo.py 0.45.0-1/tests/data/expected/main/jsonschema/duplicate_name/foo.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/duplicate_name/foo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/duplicate_name/foo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  foo.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class Foo(BaseModel):
+    pass
+
+
+class LogLevels(BaseModel):
+    __root__: str = Field(..., description='Supported logging levels')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/duration_msgspec.py 0.45.0-1/tests/data/expected/main/jsonschema/duration_msgspec.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/duration_msgspec.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/duration_msgspec.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  duration.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from datetime import timedelta
+from typing import Any, Union
+
+from msgspec import UNSET, Struct, UnsetType
+from typing_extensions import TypeAlias
+
+Model: TypeAlias = Any
+
+
+class Test(Struct):
+    s_duration: Union[timedelta, UnsetType] = UNSET
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/duration_pydantic_v2.py 0.45.0-1/tests/data/expected/main/jsonschema/duration_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/duration_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/duration_pydantic_v2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  duration.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from datetime import timedelta
+from typing import Any, Optional
+
+from pydantic import BaseModel, Field, RootModel
+
+
+class Model(RootModel[Any]):
+    root: Any
+
+
+class Test(BaseModel):
+    s_duration: Optional[timedelta] = Field(None, examples=['PT2H33M3S'])
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/enum_specialized.py 0.45.0-1/tests/data/expected/main/jsonschema/enum_specialized.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/enum_specialized.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/enum_specialized.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,53 @@
+# generated by datamodel-codegen:
+#   filename:  subclass_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum, IntEnum, StrEnum
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class IntEnumModel(IntEnum):
+    integer_1 = 1
+    integer_2 = 2
+    integer_3 = 3
+
+
+class FloatEnum(Enum):
+    number_1_1 = 1.1
+    number_2_1 = 2.1
+    number_3_1 = 3.1
+
+
+class StrEnumModel(StrEnum):
+    field_1 = '1'
+    field_2 = '2'
+    field_3 = '3'
+
+
+class NonTypedEnum(Enum):
+    field_1 = '1'
+    field_2 = '2'
+    field_3 = '3'
+
+
+class BooleanEnum(Enum):
+    boolean_True = True
+    boolean_False = False
+
+
+class UnknownEnum(Enum):
+    a = 'a'
+    b = 'b'
+
+
+class Model(BaseModel):
+    IntEnum: Optional[IntEnumModel] = None
+    FloatEnum: Optional[FloatEnum] = None
+    StrEnum: Optional[StrEnumModel] = None
+    NonTypedEnum: Optional[NonTypedEnum] = None
+    BooleanEnum: Optional[BooleanEnum] = None
+    UnknownEnum: Optional[UnknownEnum] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/enum_specialized_disable.py 0.45.0-1/tests/data/expected/main/jsonschema/enum_specialized_disable.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/enum_specialized_disable.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/enum_specialized_disable.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,53 @@
+# generated by datamodel-codegen:
+#   filename:  subclass_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class IntEnum(Enum):
+    integer_1 = 1
+    integer_2 = 2
+    integer_3 = 3
+
+
+class FloatEnum(Enum):
+    number_1_1 = 1.1
+    number_2_1 = 2.1
+    number_3_1 = 3.1
+
+
+class StrEnum(Enum):
+    field_1 = '1'
+    field_2 = '2'
+    field_3 = '3'
+
+
+class NonTypedEnum(Enum):
+    field_1 = '1'
+    field_2 = '2'
+    field_3 = '3'
+
+
+class BooleanEnum(Enum):
+    boolean_True = True
+    boolean_False = False
+
+
+class UnknownEnum(Enum):
+    a = 'a'
+    b = 'b'
+
+
+class Model(BaseModel):
+    IntEnum: Optional[IntEnum] = None
+    FloatEnum: Optional[FloatEnum] = None
+    StrEnum: Optional[StrEnum] = None
+    NonTypedEnum: Optional[NonTypedEnum] = None
+    BooleanEnum: Optional[BooleanEnum] = None
+    UnknownEnum: Optional[UnknownEnum] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/external_definitions.py 0.45.0-1/tests/data/expected/main/jsonschema/external_definitions.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/external_definitions.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/external_definitions.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  external_definitions_root.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Extra, constr
+
+
+class ElegantName(BaseModel):
+    __root__: constr(min_length=3)
+
+
+class Person(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    name: ElegantName
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/external_files.py 0.45.0-1/tests/data/expected/main/jsonschema/external_files.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/external_files.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/external_files.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  external_parent_root.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class ExternalChildRoot(BaseModel):
+    somefield: Optional[int] = None
+
+
+class Object(BaseModel):
+    metadata: ExternalChildRoot
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/external_files_in_directory.py 0.45.0-1/tests/data/expected/main/jsonschema/external_files_in_directory.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/external_files_in_directory.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/external_files_in_directory.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,77 @@
+# generated by datamodel-codegen:
+#   filename:  person.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional, Union
+
+from pydantic import BaseModel, Extra, Field, conint
+
+
+class Fur(Enum):
+    Short_hair = 'Short hair'
+    Long_hair = 'Long hair'
+
+
+class Noodle(Enum):
+    ramen = 'ramen'
+    spaghetti = 'spaghetti'
+
+
+class Soup(Enum):
+    bean = 'bean'
+    mushroom = 'mushroom'
+    tomato = 'tomato'
+
+
+class Coffee(Enum):
+    Black = 'Black'
+    Espresso = 'Espresso'
+
+
+class Tea(Enum):
+    Oolong = 'Oolong'
+    Green = 'Green'
+
+
+class Pet(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
+    fur: Optional[Fur] = None
+
+
+class Friend(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    name: str = Field(..., example='John Doe')
+    phone_number: Optional[str] = Field(None, example='(555) 555-1234')
+    food: Optional[List[Union[Noodle, Soup]]] = None
+
+
+class Friends(BaseModel):
+    __root__: List[Friend] = Field(..., title='Friends')
+
+
+class Person(BaseModel):
+    first_name: str = Field(..., description="The person's first name.")
+    last_name: str = Field(..., description="The person's last name.")
+    age: Optional[conint(ge=0)] = Field(None, description='Age in years.')
+    pets: Optional[List[Pet]] = None
+    friends: Optional[Friends] = None
+    robot: Optional[Robot] = None
+    comment: None = None
+    drink: Optional[List[Union[Coffee, Tea]]] = None
+    food: Optional[List[Union[Noodle, Soup]]] = None
+
+
+class Robot(Pet):
+    friends: Optional[Person] = None
+    drink: Optional[Coffee] = None
+    food: Optional[Noodle] = None
+    pet: Optional[Pet] = None
+
+
+Person.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/external_other_ref2.py 0.45.0-1/tests/data/expected/main/jsonschema/external_other_ref2.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/external_other_ref2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/external_other_ref2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  other/ref2.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Other(BaseModel):
+    key: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/external_ref0.py 0.45.0-1/tests/data/expected/main/jsonschema/external_ref0.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/external_ref0.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/external_ref0.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  ref0.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from . import ref2
+from .other import ref2 as ref2_1
+
+
+class Model(BaseModel):
+    ref1: Optional[ref2.Model] = None
+    other_ref1: Optional[ref2_1.Other] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/extra_fields_allow.py 0.45.0-1/tests/data/expected/main/jsonschema/extra_fields_allow.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/extra_fields_allow.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/extra_fields_allow.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,39 @@
+# generated by datamodel-codegen:
+#   filename:  extra_fields.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Extra
+
+
+class Foo(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    x: Optional[int] = None
+
+
+class Bar(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    y: Optional[int] = None
+
+
+class Baz(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    z: Optional[int] = None
+
+
+class Test(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    foo: Foo
+    bar: Optional[Bar] = None
+    baz: Optional[Baz] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/extra_fields_forbid.py 0.45.0-1/tests/data/expected/main/jsonschema/extra_fields_forbid.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/extra_fields_forbid.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/extra_fields_forbid.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,39 @@
+# generated by datamodel-codegen:
+#   filename:  extra_fields.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Extra
+
+
+class Foo(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    x: Optional[int] = None
+
+
+class Bar(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    y: Optional[int] = None
+
+
+class Baz(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    z: Optional[int] = None
+
+
+class Test(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    foo: Foo
+    bar: Optional[Bar] = None
+    baz: Optional[Baz] = None
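The three `extra_fields_*` fixtures differ only in the inner `Config.extra` value, which controls what pydantic v1 does with input keys that are not declared on the model. A small sketch of the observable difference, assuming pydantic v1 and a hypothetical `Item` model:

from pydantic import BaseModel, Extra, ValidationError


class Item(BaseModel):
    class Config:
        extra = Extra.forbid

    x: int


try:
    Item(x=1, unexpected='boom')
except ValidationError as e:
    # Extra.forbid rejects unknown keys; Extra.ignore would drop them
    # silently, and Extra.allow would keep them on the instance.
    print(e)
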
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/extra_fields_ignore.py 0.45.0-1/tests/data/expected/main/jsonschema/extra_fields_ignore.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/extra_fields_ignore.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/extra_fields_ignore.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,39 @@
+# generated by datamodel-codegen:
+#   filename:  extra_fields.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Extra
+
+
+class Foo(BaseModel):
+    class Config:
+        extra = Extra.ignore
+
+    x: Optional[int] = None
+
+
+class Bar(BaseModel):
+    class Config:
+        extra = Extra.ignore
+
+    y: Optional[int] = None
+
+
+class Baz(BaseModel):
+    class Config:
+        extra = Extra.ignore
+
+    z: Optional[int] = None
+
+
+class Test(BaseModel):
+    class Config:
+        extra = Extra.ignore
+
+    foo: Foo
+    bar: Optional[Bar] = None
+    baz: Optional[Baz] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/extra_fields_v2_allow.py 0.45.0-1/tests/data/expected/main/jsonschema/extra_fields_v2_allow.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/extra_fields_v2_allow.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/extra_fields_v2_allow.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,39 @@
+# generated by datamodel-codegen:
+#   filename:  extra_fields.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict
+
+
+class Foo(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    x: Optional[int] = None
+
+
+class Bar(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    y: Optional[int] = None
+
+
+class Baz(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    z: Optional[int] = None
+
+
+class Test(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    foo: Foo
+    bar: Optional[Bar] = None
+    baz: Optional[Baz] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/extra_fields_v2_forbid.py 0.45.0-1/tests/data/expected/main/jsonschema/extra_fields_v2_forbid.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/extra_fields_v2_forbid.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/extra_fields_v2_forbid.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,39 @@
+# generated by datamodel-codegen:
+#   filename:  extra_fields.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict
+
+
+class Foo(BaseModel):
+    model_config = ConfigDict(
+        extra='forbid',
+    )
+    x: Optional[int] = None
+
+
+class Bar(BaseModel):
+    model_config = ConfigDict(
+        extra='forbid',
+    )
+    y: Optional[int] = None
+
+
+class Baz(BaseModel):
+    model_config = ConfigDict(
+        extra='forbid',
+    )
+    z: Optional[int] = None
+
+
+class Test(BaseModel):
+    model_config = ConfigDict(
+        extra='forbid',
+    )
+    foo: Foo
+    bar: Optional[Bar] = None
+    baz: Optional[Baz] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/extra_fields_v2_ignore.py 0.45.0-1/tests/data/expected/main/jsonschema/extra_fields_v2_ignore.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/extra_fields_v2_ignore.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/extra_fields_v2_ignore.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,39 @@
+# generated by datamodel-codegen:
+#   filename:  extra_fields.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict
+
+
+class Foo(BaseModel):
+    model_config = ConfigDict(
+        extra='ignore',
+    )
+    x: Optional[int] = None
+
+
+class Bar(BaseModel):
+    model_config = ConfigDict(
+        extra='ignore',
+    )
+    y: Optional[int] = None
+
+
+class Baz(BaseModel):
+    model_config = ConfigDict(
+        extra='ignore',
+    )
+    z: Optional[int] = None
+
+
+class Test(BaseModel):
+    model_config = ConfigDict(
+        extra='ignore',
+    )
+    foo: Foo
+    bar: Optional[Bar] = None
+    baz: Optional[Baz] = None
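The `_v2_` variants encode the same three policies with pydantic v2's `model_config = ConfigDict(extra=...)` in place of a nested `Config` class. A quick sketch of the `allow` case, assuming pydantic v2 and a hypothetical `Payload` model:

from pydantic import BaseModel, ConfigDict


class Payload(BaseModel):
    model_config = ConfigDict(extra='allow')

    x: int


p = Payload(x=1, tag='kept')
# Unknown keys are retained: they appear in model_extra and in model_dump()
assert p.model_extra == {'tag': 'kept'}
assert p.model_dump() == {'x': 1, 'tag': 'kept'}
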
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/extras_in_oneof.py 0.45.0-1/tests/data/expected/main/jsonschema/extras_in_oneof.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/extras_in_oneof.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/extras_in_oneof.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,45 @@
+# generated by datamodel-codegen:
+#   filename:  extras_in_oneof.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class OneofProp(BaseModel):
+    shared_prop: Optional[str] = Field(
+        None, json_schema_extra={'x-shared': 'shared_value'}
+    )
+    variant_a_prop: Optional[str] = Field(
+        None, json_schema_extra={'x-variant': 'variant_a_value'}
+    )
+
+
+class OneofProp1(BaseModel):
+    shared_prop: Optional[str] = Field(
+        None, json_schema_extra={'x-shared': 'shared_value'}
+    )
+    variant_b_prop: Optional[int] = Field(
+        None, json_schema_extra={'x-variant': 'variant_b_value'}
+    )
+
+
+class AnyofProp(BaseModel):
+    any_a_prop: Optional[str] = Field(None, json_schema_extra={'x-any': 'any_a_value'})
+
+
+class AnyofProp1(BaseModel):
+    any_b_prop: Optional[bool] = Field(None, json_schema_extra={'x-any': 'any_b_value'})
+
+
+class ExtrasInOneOf(BaseModel):
+    simple_prop: Optional[str] = Field(
+        None, json_schema_extra={'x-custom': 'simple_value'}
+    )
+    oneof_prop: Optional[Union[OneofProp, OneofProp1]] = Field(
+        None, json_schema_extra={'x-parent-custom': 'parent_value'}
+    )
+    anyof_prop: Optional[Union[AnyofProp, AnyofProp1]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/field_extras.py 0.45.0-1/tests/data/expected/main/jsonschema/field_extras.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/field_extras.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/field_extras.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  extras.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class Extras(BaseModel):
+    name: Optional[str] = Field(None, description='normal key', example='example')
+    age: Optional[int] = Field(None, example=12, examples=[13, 20])
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/field_extras_field_extra_keys.py 0.45.0-1/tests/data/expected/main/jsonschema/field_extras_field_extra_keys.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/field_extras_field_extra_keys.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/field_extras_field_extra_keys.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  extras.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class Extras(BaseModel):
+    name: Optional[str] = Field(
+        None,
+        description='normal key',
+        example='example',
+        invalid_key_1='abc',
+        key2=456,
+        repr=True,
+    )
+    age: Optional[int] = Field(None, example=12, examples=[13, 20])
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/field_extras_field_extra_keys_v2.py 0.45.0-1/tests/data/expected/main/jsonschema/field_extras_field_extra_keys_v2.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/field_extras_field_extra_keys_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/field_extras_field_extra_keys_v2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,22 @@
+# generated by datamodel-codegen:
+#   filename:  extras.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class Extras(BaseModel):
+    name: Optional[str] = Field(
+        None,
+        description='normal key',
+        examples=['example'],
+        json_schema_extra={'key2': 456, 'invalid-key-1': 'abc'},
+        repr=True,
+    )
+    age: Optional[int] = Field(
+        None, examples=[13, 20], json_schema_extra={'example': 12}
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/field_extras_field_include_all_keys.py 0.45.0-1/tests/data/expected/main/jsonschema/field_extras_field_include_all_keys.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/field_extras_field_include_all_keys.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/field_extras_field_include_all_keys.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,29 @@
+# generated by datamodel-codegen:
+#   filename:  extras.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class Extras(BaseModel):
+    name: Optional[str] = Field(
+        None,
+        description='normal key',
+        example='example',
+        field_comment='comment',
+        field_exclude=123,
+        field_invalid_key_2='efg',
+        invalid_key_1='abc',
+        key1=123,
+        key2=456,
+        readOnly=True,
+        register_='hij',
+        repr=True,
+        schema_='klm',
+        x_abc=True,
+    )
+    age: Optional[int] = Field(None, example=12, examples=[13, 20], writeOnly=True)
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/field_extras_field_include_all_keys_v2.py 0.45.0-1/tests/data/expected/main/jsonschema/field_extras_field_include_all_keys_v2.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/field_extras_field_include_all_keys_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/field_extras_field_include_all_keys_v2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  extras.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class Extras(BaseModel):
+    name: Optional[str] = Field(
+        None,
+        description='normal key',
+        examples=['example'],
+        json_schema_extra={
+            'key1': 123,
+            'key2': 456,
+            '$exclude': 123,
+            'invalid-key-1': 'abc',
+            '-invalid+key_2': 'efg',
+            '$comment': 'comment',
+            'register': 'hij',
+            'schema': 'klm',
+            'x-abc': True,
+            'readOnly': True,
+        },
+        repr=True,
+    )
+    age: Optional[int] = Field(
+        None, examples=[13, 20], json_schema_extra={'example': 12, 'writeOnly': True}
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/field_extras_v2.py 0.45.0-1/tests/data/expected/main/jsonschema/field_extras_v2.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/field_extras_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/field_extras_v2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  extras.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class Extras(BaseModel):
+    name: Optional[str] = Field(None, description='normal key', examples=['example'])
+    age: Optional[int] = Field(
+        None, examples=[13, 20], json_schema_extra={'example': 12}
+    )
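In the v2 output, keys with no `Field()` counterpart (such as the singular, v1-style `example`) are routed into `json_schema_extra`, from where pydantic merges them back into the generated JSON schema. A sketch of that round trip, assuming pydantic v2:

from typing import Optional

from pydantic import BaseModel, Field


class Extras(BaseModel):
    age: Optional[int] = Field(
        None, examples=[13, 20], json_schema_extra={'example': 12}
    )


schema = Extras.model_json_schema()
# The extra key survives alongside the first-class "examples" keyword
assert schema['properties']['age']['example'] == 12
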
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/field_has_same_name.py 0.45.0-1/tests/data/expected/main/jsonschema/field_has_same_name.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/field_has_same_name.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/field_has_same_name.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  field_has_same_name.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class TestObject(BaseModel):
+    test_string: Optional[str] = None
+
+
+class Test(BaseModel):
+    TestObject: Optional[TestObject] = Field(None, title='TestObject')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/field_has_same_name_v2.py 0.45.0-1/tests/data/expected/main/jsonschema/field_has_same_name_v2.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/field_has_same_name_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/field_has_same_name_v2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  field_has_same_name.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class TestObject(BaseModel):
+    test_string: Optional[str] = None
+
+
+class Test(BaseModel):
+    TestObject_1: Optional[TestObject] = Field(
+        None, alias='TestObject', title='TestObject'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/field_name_shadows_class_name.py 0.45.0-1/tests/data/expected/main/jsonschema/field_name_shadows_class_name.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/field_name_shadows_class_name.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/field_name_shadows_class_name.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  field_name_shadows_class_name.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class Elem(BaseModel):
+    temp: str
+
+
+class Model(BaseModel):
+    Elem_1: Optional[List[Elem]] = Field(None, alias='Elem')
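When a property name collides with a generated class name, the generator renames the attribute and preserves the wire name via `alias`, as in the fixture above. A sketch of round-tripping that exact model, assuming pydantic v1:

from typing import List, Optional

from pydantic import BaseModel, Field


class Elem(BaseModel):
    temp: str


class Model(BaseModel):
    Elem_1: Optional[List[Elem]] = Field(None, alias='Elem')


# Input uses the original JSON key; the attribute uses the safe name
m = Model.parse_obj({'Elem': [{'temp': 'x'}]})
assert m.Elem_1[0].temp == 'x'
# Serializing by alias restores the original key
assert m.dict(by_alias=True) == {'Elem': [{'temp': 'x'}]}
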
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/forwarding_reference/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/forwarding_reference/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/forwarding_reference/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/forwarding_reference/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  forwarding_reference
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/forwarding_reference/commons.py 0.45.0-1/tests/data/expected/main/jsonschema/forwarding_reference/commons.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/forwarding_reference/commons.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/forwarding_reference/commons.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  commons.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    __root__: Any
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/forwarding_reference/forwarding.py 0.45.0-1/tests/data/expected/main/jsonschema/forwarding_reference/forwarding.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/forwarding_reference/forwarding.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/forwarding_reference/forwarding.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  forwarding.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, List
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class ForwardingArray(BaseModel):
+    __root__: List[Any]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/forwarding_reference/schema.py 0.45.0-1/tests/data/expected/main/jsonschema/forwarding_reference/schema.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/forwarding_reference/schema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/forwarding_reference/schema.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any
+
+from pydantic import BaseModel
+
+from . import forwarding
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class Pets(BaseModel):
+    __root__: forwarding.ForwardingArray
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/general.py 0.45.0-1/tests/data/expected/main/jsonschema/general.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/general.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/general.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  person.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, List, Optional
+
+from pydantic import BaseModel, Field, conint
+
+
+class Person(BaseModel):
+    firstName: Optional[str] = Field(None, description="The person's first name.")
+    lastName: Optional[str] = Field(None, description="The person's last name.")
+    age: Optional[conint(ge=0)] = Field(
+        None, description='Age in years which must be equal to or greater than zero.'
+    )
+    friends: Optional[List[Any]] = None
+    comment: None = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/general_dataclass_frozen_kw_only.py 0.45.0-1/tests/data/expected/main/jsonschema/general_dataclass_frozen_kw_only.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/general_dataclass_frozen_kw_only.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/general_dataclass_frozen_kw_only.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  person.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Any, List, Optional
+
+
+@dataclass(frozen=True, kw_only=True)
+class Person:
+    firstName: Optional[str] = None
+    lastName: Optional[str] = None
+    age: Optional[int] = None
+    friends: Optional[List[Any]] = None
+    comment: None = None
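With the dataclass output model type, the generator can emit plain stdlib dataclasses; `frozen=True, kw_only=True` makes instances immutable and keyword-only (`kw_only` requires Python 3.10+). A sketch with a trimmed-down hypothetical `Person`:

from dataclasses import FrozenInstanceError, dataclass
from typing import Optional


@dataclass(frozen=True, kw_only=True)
class Person:
    firstName: Optional[str] = None
    age: Optional[int] = None


p = Person(firstName='Ada', age=36)  # positional arguments would raise TypeError
try:
    p.age = 37
except FrozenInstanceError:
    # frozen=True blocks attribute assignment after construction
    pass
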
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/generate_non_pydantic_output.py 0.45.0-1/tests/data/expected/main/jsonschema/generate_non_pydantic_output.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/generate_non_pydantic_output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/generate_non_pydantic_output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  simple_string.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+
+
+@dataclass
+class Model:
+    s: str
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/has_default_value.py 0.45.0-1/tests/data/expected/main/jsonschema/has_default_value.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/has_default_value.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/has_default_value.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,43 @@
+# generated by datamodel-codegen:
+#   filename:  has_default_value.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class TeamType(Enum):
+    Department = 'Department'
+    Division = 'Division'
+    BusinessUnit = 'BusinessUnit'
+    Organization = 'Organization'
+
+
+class ID(BaseModel):
+    __root__: str
+
+
+class Pet(BaseModel):
+    name: Optional[str] = None
+
+
+class Family(BaseModel):
+    __root__: List[ID]
+
+
+class FamilyPets(BaseModel):
+    __root__: List[Pet]
+
+
+class Person(BaseModel):
+    id: Optional[ID] = Field(default_factory=lambda: ID.parse_obj('abc'))
+    user: Optional[Pet] = None
+    firstName: Optional[str] = Field(None, description="The person's first name.")
+    team: Optional[TeamType] = 'Department'
+    anotherTeam: Optional[TeamType] = 'Department'
+    Family: Optional[Family] = None
+    FamilyPets: Optional[FamilyPets] = None
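The `id` field above gets a non-trivial default: a `default_factory` that wraps the schema's default `'abc'` in the `ID` custom-root model at instantiation time, so every instance receives a fresh object. A sketch of that behavior, assuming pydantic v1:

from pydantic import BaseModel, Field


class ID(BaseModel):
    __root__: str


class Person(BaseModel):
    id: ID = Field(default_factory=lambda: ID.parse_obj('abc'))


p = Person()
# The factory runs once per instance and wraps the schema default
assert p.id.__root__ == 'abc'
assert Person().id is not p.id
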
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/id.py 0.45.0-1/tests/data/expected/main/jsonschema/id.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/id.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/id.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  id.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Address(BaseModel):
+    street_address: str
+    city: str
+    state: str
+
+
+class Model(BaseModel):
+    billing_address: Optional[Address] = None
+    shipping_address: Optional[Address] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/id_stdin.py 0.45.0-1/tests/data/expected/main/jsonschema/id_stdin.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/id_stdin.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/id_stdin.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  <stdin>
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Address(BaseModel):
+    street_address: str
+    city: str
+    state: str
+
+
+class Model(BaseModel):
+    billing_address: Optional[Address] = None
+    shipping_address: Optional[Address] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/ids/ContactPoint.py 0.45.0-1/tests/data/expected/main/jsonschema/ids/ContactPoint.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/ids/ContactPoint.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/ids/ContactPoint.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  Organization.schema.json
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel, EmailStr
+
+
+class Type(Enum):
+    ContactPoint = 'ContactPoint'
+
+
+class Schema(BaseModel):
+    type: Type
+    contactType: Optional[str] = None
+    email: EmailStr
+    telephone: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/ids/URI.py 0.45.0-1/tests/data/expected/main/jsonschema/ids/URI.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/ids/URI.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/ids/URI.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  Organization.schema.json
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Schema(BaseModel):
+    __root__: AnyUrl = Field(..., description='String representing a URI.', title='URI')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/ids/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/ids/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/ids/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/ids/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,28 @@
+# generated by datamodel-codegen:
+#   filename:  Organization.schema.json
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel
+
+from . import URI, ContactPoint
+from . import id as id_1
+from . import name as name_1
+from . import sameAs as sameAs_1
+
+
+class Type(Enum):
+    Organization = 'Organization'
+
+
+class Organization(BaseModel):
+    id: Optional[id_1.Schema] = None
+    type: Type
+    name: name_1.Schema
+    contactPoint: Optional[ContactPoint.Schema] = None
+    sameAs: Optional[sameAs_1.Schema] = None
+    url: Optional[URI.Schema] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/ids/id.py 0.45.0-1/tests/data/expected/main/jsonschema/ids/id.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/ids/id.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/ids/id.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  Organization.schema.json
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class Schema(BaseModel):
+    __root__: str = Field(
+        ..., description='Identifier string of this object.', title='id'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/ids/name.py 0.45.0-1/tests/data/expected/main/jsonschema/ids/name.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/ids/name.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/ids/name.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  Organization.schema.json
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class Schema(BaseModel):
+    __root__: str = Field(
+        ...,
+        description="A descriptive (full) name of the entity. For example, a dataset called 'Snow depth in the Northern Hemisphere' or a person called 'Sarah L. Jones' or a place called 'The Empire States Building'. Use unique names for distinct entities whenever possible.",
+        title='name',
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/ids/sameAs.py 0.45.0-1/tests/data/expected/main/jsonschema/ids/sameAs.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/ids/sameAs.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/ids/sameAs.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  Organization.schema.json
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+from . import URI
+
+
+class Schema(BaseModel):
+    __root__: URI.Schema = Field(
+        ...,
+        description='Use the sameAs property to indicate the most canonical URLs for the original in cases of the entity. For example this may be a link to the original metadata of a dataset, definition of a property, Person, Organization or Place.',
+        title='sameAs',
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/ids/type.py 0.45.0-1/tests/data/expected/main/jsonschema/ids/type.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/ids/type.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/ids/type.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  Organization.schema.json
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class Schema(BaseModel):
+    __root__: str = Field(..., description='Type of this object.', title='type')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/imports_correct/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/imports_correct/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/imports_correct/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/imports_correct/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  imports_correct
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/imports_correct/schema.py 0.45.0-1/tests/data/expected/main/jsonschema/imports_correct/schema.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/imports_correct/schema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/imports_correct/schema.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+from . import type_1
+
+
+class Response(BaseModel):
+    inner: type_1.Type1 = Field(..., discriminator='type_', title='Inner')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/imports_correct/type_1.py 0.45.0-1/tests/data/expected/main/jsonschema/imports_correct/type_1.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/imports_correct/type_1.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/imports_correct/type_1.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  type_1.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Literal
+
+from pydantic import BaseModel, Field
+
+
+class Type(Enum):
+    a = 'a'
+    A = 'A'
+
+
+class Type1(BaseModel):
+    type_: Literal['a', 'A'] = Field(..., title='Type')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/inheritance_forward_ref.py 0.45.0-1/tests/data/expected/main/jsonschema/inheritance_forward_ref.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/inheritance_forward_ref.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/inheritance_forward_ref.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,50 @@
+# generated by datamodel-codegen:
+#   filename:  inheritance_forward_ref.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class DogBase(BaseModel):
+    name: Optional[str] = Field(None, title='Name')
+    woof: Optional[bool] = Field(True, title='Woof')
+
+
+class PersonBase(BaseModel):
+    name: Optional[str] = Field(None, title='Name')
+
+
+class PersonsBestFriend(BaseModel):
+    people: Optional[List[Person]] = Field(None, title='People')
+    dogs: Optional[List[Dog]] = Field(None, title='Dogs')
+    dog_base: Optional[DogBase] = None
+    dog_relationships: Optional[DogRelationships] = None
+    person_base: Optional[PersonBase] = None
+    person_relationships: Optional[PersonRelationships] = None
+
+
+class DogRelationships(BaseModel):
+    people: Optional[List[Person]] = Field(None, title='People')
+
+
+class PersonRelationships(BaseModel):
+    people: Optional[List[Person]] = Field(None, title='People')
+
+
+class Dog(DogBase, DogRelationships):
+    pass
+
+
+class Person(PersonBase, PersonRelationships):
+    pass
+
+
+PersonsBestFriend.update_forward_refs()
+DogRelationships.update_forward_refs()
+PersonRelationships.update_forward_refs()
+Dog.update_forward_refs()
+Person.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/inheritance_forward_ref_keep_model_order.py 0.45.0-1/tests/data/expected/main/jsonschema/inheritance_forward_ref_keep_model_order.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/inheritance_forward_ref_keep_model_order.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/inheritance_forward_ref_keep_model_order.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,50 @@
+# generated by datamodel-codegen:
+#   filename:  inheritance_forward_ref.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class DogBase(BaseModel):
+    name: Optional[str] = Field(None, title='Name')
+    woof: Optional[bool] = Field(True, title='Woof')
+
+
+class DogRelationships(BaseModel):
+    people: Optional[List[Person]] = Field(None, title='People')
+
+
+class Dog(DogBase, DogRelationships):
+    pass
+
+
+class PersonBase(BaseModel):
+    name: Optional[str] = Field(None, title='Name')
+
+
+class PersonRelationships(BaseModel):
+    people: Optional[List[Person]] = Field(None, title='People')
+
+
+class Person(PersonBase, PersonRelationships):
+    pass
+
+
+class PersonsBestFriend(BaseModel):
+    people: Optional[List[Person]] = Field(None, title='People')
+    dogs: Optional[List[Dog]] = Field(None, title='Dogs')
+    dog_base: Optional[DogBase] = None
+    dog_relationships: Optional[DogRelationships] = None
+    person_base: Optional[PersonBase] = None
+    person_relationships: Optional[PersonRelationships] = None
+
+
+DogRelationships.update_forward_refs()
+Dog.update_forward_refs()
+PersonRelationships.update_forward_refs()
+Person.update_forward_refs()
+PersonsBestFriend.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/invalid_enum_name.py 0.45.0-1/tests/data/expected/main/jsonschema/invalid_enum_name.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/invalid_enum_name.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/invalid_enum_name.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  invalid_enum_name.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class InvalidEnum(Enum):
+    field_1_value = '1 value'
+    field_space = ' space'
+    field___special = '*- special'
+    schema = 'schema'
+    MRO = 'MRO'
+    mro_ = 'mro'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/invalid_enum_name_snake_case_field.py 0.45.0-1/tests/data/expected/main/jsonschema/invalid_enum_name_snake_case_field.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/invalid_enum_name_snake_case_field.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/invalid_enum_name_snake_case_field.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  invalid_enum_name.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class InvalidEnum(Enum):
+    field_1_value = '1 value'
+    field_space = ' space'
+    field___special = '*- special'
+    schema = 'schema'
+    mro_1 = 'MRO'
+    mro_ = 'mro'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/invalid_import_name/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/invalid_import_name/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/invalid_import_name/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/invalid_import_name/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  invalid_import_name
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/invalid_import_name/array_commons_schema.py 0.45.0-1/tests/data/expected/main/jsonschema/invalid_import_name/array_commons_schema.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/invalid_import_name/array_commons_schema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/invalid_import_name/array_commons_schema.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  array-commons.schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, List
+
+from pydantic import Field, RootModel
+
+
+class Commons(RootModel[Any]):
+    root: Any = Field(..., description='Commons objects', title='Commons')
+
+
+class DefaultArray(RootModel[List[Any]]):
+    root: List[Any] = Field(..., max_length=100, min_length=1)
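Pydantic v2 replaces `__root__` with `RootModel`, and the fixture's `DefaultArray` also carries `min_length`/`max_length` constraints on the root list. A sketch of how such a model validates, assuming pydantic v2:

from typing import Any, List

from pydantic import Field, RootModel, ValidationError


class DefaultArray(RootModel[List[Any]]):
    root: List[Any] = Field(..., max_length=100, min_length=1)


arr = DefaultArray([1, 'two', 3.0])  # the root value is passed positionally
assert arr.root[1] == 'two'

try:
    DefaultArray([])  # violates min_length=1
except ValidationError:
    pass
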
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/invalid_import_name/products_schema.py 0.45.0-1/tests/data/expected/main/jsonschema/invalid_import_name/products_schema.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/invalid_import_name/products_schema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/invalid_import_name/products_schema.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  products.schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import Field, RootModel
+
+from . import array_commons_schema
+
+
+class Products(RootModel[array_commons_schema.DefaultArray]):
+    root: array_commons_schema.DefaultArray = Field(
+        ..., description='The products in the catalog', title='Products'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/invalid_model_name.py 0.45.0-1/tests/data/expected/main/jsonschema/invalid_model_name.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/invalid_model_name.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/invalid_model_name.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  invalid_model_name.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, List, Optional
+
+from pydantic import BaseModel, Field, conint
+
+
+class ValidModelName(BaseModel):
+    firstName: Optional[str] = Field(None, description="The person's first name.")
+    lastName: Optional[str] = Field(None, description="The person's last name.")
+    age: Optional[conint(ge=0)] = Field(
+        None, description='Age in years which must be equal to or greater than zero.'
+    )
+    friends: Optional[List[Any]] = None
+    comment: None = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/items_boolean.py 0.45.0-1/tests/data/expected/main/jsonschema/items_boolean.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/items_boolean.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/items_boolean.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  items_boolean.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, List, Optional
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    example: Optional[List[Any]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/json_capitalise_enum_members.py 0.45.0-1/tests/data/expected/main/jsonschema/json_capitalise_enum_members.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/json_capitalise_enum_members.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/json_capitalise_enum_members.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  many_case_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class Model(Enum):
+    SNAKE_CASE = 'snake_case'
+    CAP_CASE = 'CAP_CASE'
+    CAMEL_CASE = 'CamelCase'
+    UPPERCASE = 'UPPERCASE'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/json_pointer.py 0.45.0-1/tests/data/expected/main/jsonschema/json_pointer.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/json_pointer.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/json_pointer.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,42 @@
+# generated by datamodel-codegen:
+#   filename:  json_pointer.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, Optional, Union
+
+from pydantic import BaseModel, Extra, Field
+
+
+class Pets(BaseModel):
+    __root__: Any
+
+
+class PetType(Enum):
+    Cat = 'Cat'
+
+
+class Cat(BaseModel):
+    pet_type: PetType
+    hunts: bool
+    age: str
+
+
+class PetType1(Enum):
+    Dog = 'Dog'
+
+
+class Dog(BaseModel):
+    pet_type: PetType1
+    bark: bool
+    breed: str
+
+
+class Person(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    name: Optional[str] = Field(None, title='name')
+    pet: Optional[Union[Cat, Dog]] = Field(None, title='pet')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/json_pointer_array.py 0.45.0-1/tests/data/expected/main/jsonschema/json_pointer_array.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/json_pointer_array.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/json_pointer_array.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,29 @@
+# generated by datamodel-codegen:
+#   filename:  json_pointer_array.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Union
+
+from pydantic import BaseModel, EmailStr
+
+
+class Email(BaseModel):
+    email: EmailStr
+
+
+class Error(BaseModel):
+    code: str
+
+
+class Model1(BaseModel):
+    emails: List[Email]
+
+
+class Model2(BaseModel):
+    errors: List[Error]
+
+
+class Model(BaseModel):
+    __root__: Union[Model1, Model2]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/json_reuse_enum.py 0.45.0-1/tests/data/expected/main/jsonschema/json_reuse_enum.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/json_reuse_enum.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/json_reuse_enum.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,34 @@
+# generated by datamodel-codegen:
+#   filename:  duplicate_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class Animal(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class RedistributeEnum(Enum):
+    static = 'static'
+    connected = 'connected'
+
+
+class User(BaseModel):
+    name: Optional[str] = None
+    animal: Optional[Animal] = 'dog'
+    pet: Optional[Animal] = 'cat'
+    redistribute: Optional[List[RedistributeEnum]] = None
+
+
+class Redistribute(BaseModel):
+    __root__: List[RedistributeEnum] = Field(
+        ..., description='Redistribute type for routes.'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/json_reuse_enum_default_member.py 0.45.0-1/tests/data/expected/main/jsonschema/json_reuse_enum_default_member.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/json_reuse_enum_default_member.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/json_reuse_enum_default_member.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,34 @@
+# generated by datamodel-codegen:
+#   filename:  duplicate_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class Animal(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class RedistributeEnum(Enum):
+    static = 'static'
+    connected = 'connected'
+
+
+class User(BaseModel):
+    name: Optional[str] = None
+    animal: Optional[Animal] = Animal.dog
+    pet: Optional[Animal] = Animal.cat
+    redistribute: Optional[List[RedistributeEnum]] = None
+
+
+class Redistribute(BaseModel):
+    __root__: List[RedistributeEnum] = Field(
+        ..., description='Redistribute type for routes.'
+    )
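The only difference from the plain-string variant directly above is the default style: `'dog'` versus `Animal.dog`. Since pydantic v1 does not validate defaults unless asked, the two fixtures yield different runtime types for an untouched field. A sketch of that distinction, assuming pydantic v1:

from enum import Enum
from typing import Optional

from pydantic import BaseModel


class Animal(Enum):
    dog = 'dog'
    cat = 'cat'


class WithMember(BaseModel):
    animal: Optional[Animal] = Animal.dog


class WithString(BaseModel):
    animal: Optional[Animal] = 'dog'


# Defaults are not validated in pydantic v1, so the types differ
assert WithMember().animal is Animal.dog
assert WithString().animal == 'dog'                    # still a plain str
assert WithString(animal='dog').animal is Animal.dog   # validation coerces
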
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/jsonschema_allof_enum_no_external_ref.py 0.45.0-1/tests/data/expected/main/jsonschema/jsonschema_allof_enum_no_external_ref.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/jsonschema_allof_enum_no_external_ref.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/jsonschema_allof_enum_no_external_ref.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  allof_enum_no_external_ref.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class MassUnit(Enum):
+    g = 'g'
+    kg = 'kg'
+    t = 't'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/jsonschema_allof_enum_ref.py 0.45.0-1/tests/data/expected/main/jsonschema/jsonschema_allof_enum_ref.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/jsonschema_allof_enum_ref.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/jsonschema_allof_enum_ref.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,28 @@
+# generated by datamodel-codegen:
+#   filename:  allof_enum_ref.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, Optional
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class MassUnit(Enum):
+    g = 'g'
+    kg = 'kg'
+    t = 't'
+
+
+class QuantityTrunc(BaseModel):
+    __root__: MassUnit
+
+
+class CreateOrderByEstimateRequest(BaseModel):
+    quantity_trunc: Optional[QuantityTrunc] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/jsonschema_collapse_root_models_empty_union.py 0.45.0-1/tests/data/expected/main/jsonschema/jsonschema_collapse_root_models_empty_union.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/jsonschema_collapse_root_models_empty_union.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/jsonschema_collapse_root_models_empty_union.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  collapse_root_models_empty_union.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    field: Any = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/jsonschema_collapse_root_models_nested_reference.py 0.45.0-1/tests/data/expected/main/jsonschema/jsonschema_collapse_root_models_nested_reference.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/jsonschema_collapse_root_models_nested_reference.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/jsonschema_collapse_root_models_nested_reference.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  collapse_root_models_nested_reference.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List
+
+from pydantic import BaseModel
+
+
+class SomeRef(BaseModel):
+    id: str
+
+
+class Model(BaseModel):
+    refs: List[SomeRef]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/jsonschema_collapse_root_models_with_optional.py 0.45.0-1/tests/data/expected/main/jsonschema/jsonschema_collapse_root_models_with_optional.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/jsonschema_collapse_root_models_with_optional.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/jsonschema_collapse_root_models_with_optional.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  collapse_root_models_with_optional.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    field: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/jsonschema_empty_items_array.py 0.45.0-1/tests/data/expected/main/jsonschema/jsonschema_empty_items_array.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/jsonschema_empty_items_array.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/jsonschema_empty_items_array.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  empty_items_array.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Dict, List, Optional
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    datum_kwargs: Optional[Dict[str, List[Any]]] = None
+    simple_list: Optional[List[Any]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/jsonschema_enum_object_values.py 0.45.0-1/tests/data/expected/main/jsonschema/jsonschema_enum_object_values.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/jsonschema_enum_object_values.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/jsonschema_enum_object_values.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,43 @@
+# generated by datamodel-codegen:
+#   filename:  enum_object_values.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class CountryWithTitle(Enum):
+    USA = {'title': 'USA', 'code': 'US'}
+    CANADA = {'title': 'CANADA', 'code': 'CA'}
+
+
+class CountryWithName(Enum):
+    USA = {'name': 'USA', 'code': 'US'}
+    CANADA = {'name': 'CANADA', 'code': 'CA'}
+
+
+class CountryWithConst(Enum):
+    us_value = {'const': 'us_value'}
+    ca_value = {'const': 'ca_value'}
+
+
+class CountryNoIdentifier(Enum):
+    value_0 = {'code': 'US'}
+    value_1 = {'code': 'CA'}
+
+
+class CountryDuplicateTitle(Enum):
+    SAME = {'title': 'SAME', 'code': 'US'}
+    SAME_1 = {'title': 'SAME', 'code': 'CA'}
+
+
+class EnumObjectTest(BaseModel):
+    country_with_title: Optional[CountryWithTitle] = None
+    country_with_name: Optional[CountryWithName] = None
+    country_with_const: Optional[CountryWithConst] = None
+    country_no_identifier: Optional[CountryNoIdentifier] = None
+    country_duplicate_title: Optional[CountryDuplicateTitle] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/jsonschema_hierarchical_aliases_scoped.py 0.45.0-1/tests/data/expected/main/jsonschema/jsonschema_hierarchical_aliases_scoped.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/jsonschema_hierarchical_aliases_scoped.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/jsonschema_hierarchical_aliases_scoped.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  hierarchical_aliases.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class User(BaseModel):
+    user_name: Optional[str] = Field(None, alias='name')
+    id: Optional[int] = None
+
+
+class Address(BaseModel):
+    address_name: Optional[str] = Field(None, alias='name')
+    city: Optional[str] = None
+
+
+class Root(BaseModel):
+    root_name: Optional[str] = Field(None, alias='name')
+    user: Optional[User] = Field(None, title='User')
+    address: Optional[Address] = Field(None, title='Address')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/jsonschema_multiple_types_with_object.py 0.45.0-1/tests/data/expected/main/jsonschema/jsonschema_multiple_types_with_object.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/jsonschema_multiple_types_with_object.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/jsonschema_multiple_types_with_object.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,31 @@
+# generated by datamodel-codegen:
+#   filename:  multiple_types_with_object.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Union
+
+from pydantic import BaseModel
+
+
+class External(BaseModel):
+    name: Optional[str] = None
+
+
+class Config(BaseModel):
+    value: Optional[int] = None
+
+
+class TopLevelMultiType1(BaseModel):
+    enabled: Optional[bool] = None
+
+
+class TopLevelMultiType(BaseModel):
+    __root__: Union[TopLevelMultiType1, bool]
+
+
+class Model(BaseModel):
+    external: Optional[Union[External, bool]] = None
+    config: Optional[Union[Optional[Config], str]] = None
+    top_level_ref: Optional[TopLevelMultiType] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/jsonschema_root_model_ordering.py 0.45.0-1/tests/data/expected/main/jsonschema/jsonschema_root_model_ordering.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/jsonschema_root_model_ordering.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/jsonschema_root_model_ordering.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,42 @@
+# generated by datamodel-codegen:
+#   filename:  root_model_ordering.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Literal, Optional, Union
+
+from pydantic import BaseModel, Field, RootModel
+
+
+class Zoo(BaseModel):
+    animals: Optional[List[Animals]] = Field(default_factory=list, title='Animals')
+
+
+class Dog(BaseModel):
+    name: Literal['dog'] = Field('dog', title='woof')
+    friends: Optional[List[Friends]] = Field(default_factory=list, title='Friends')
+
+
+class Cat(BaseModel):
+    name: Literal['cat'] = Field('cat', title='meow')
+    friends: Optional[List[Friends]] = Field(default_factory=list, title='Friends')
+
+
+class Bird(BaseModel):
+    name: Literal['bird'] = Field('bird', title='chirp')
+    friends: Optional[List[Friends]] = Field(default_factory=list, title='Friends')
+
+
+class Animals(RootModel[Union[Dog, Cat, Bird]]):
+    root: Union[Dog, Cat, Bird] = Field(..., discriminator='name', title='Animal')
+
+
+class Friends(RootModel[Union[Dog, Cat, Bird]]):
+    root: Union[Dog, Cat, Bird] = Field(..., discriminator='name', title='Animal')
+
+
+Zoo.model_rebuild()
+Dog.model_rebuild()
+Cat.model_rebuild()
+Bird.model_rebuild()
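
This fixture pins down model ordering for pydantic v2 RootModel wrappers: Zoo
is emitted before Animals, which is only legal because annotations are
postponed and the trailing model_rebuild() calls resolve the forward
references afterwards. A self-contained sketch of the discriminated union
itself, assuming pydantic >= 2:

    from typing import Literal, Union

    from pydantic import BaseModel, Field, RootModel

    class Dog(BaseModel):
        name: Literal['dog'] = 'dog'

    class Cat(BaseModel):
        name: Literal['cat'] = 'cat'

    class Animal(RootModel[Union[Dog, Cat]]):
        root: Union[Dog, Cat] = Field(..., discriminator='name')

    # the 'name' literal selects the branch directly instead of trying
    # each union member in turn
    assert isinstance(Animal.model_validate({'name': 'cat'}).root, Cat)
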
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/jsonschema_root_model_ordering_keep_model_order.py 0.45.0-1/tests/data/expected/main/jsonschema/jsonschema_root_model_ordering_keep_model_order.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/jsonschema_root_model_ordering_keep_model_order.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/jsonschema_root_model_ordering_keep_model_order.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,41 @@
+# generated by datamodel-codegen:
+#   filename:  root_model_ordering.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Literal, Optional, Union
+
+from pydantic import BaseModel, Field, RootModel
+
+
+class Bird(BaseModel):
+    name: Literal['bird'] = Field('bird', title='chirp')
+    friends: Optional[List[Friends]] = Field(default_factory=list, title='Friends')
+
+
+class Cat(BaseModel):
+    name: Literal['cat'] = Field('cat', title='meow')
+    friends: Optional[List[Friends]] = Field(default_factory=list, title='Friends')
+
+
+class Dog(BaseModel):
+    name: Literal['dog'] = Field('dog', title='woof')
+    friends: Optional[List[Friends]] = Field(default_factory=list, title='Friends')
+
+
+class Animals(RootModel[Union[Dog, Cat, Bird]]):
+    root: Union[Dog, Cat, Bird] = Field(..., discriminator='name', title='Animal')
+
+
+class Friends(RootModel[Union[Dog, Cat, Bird]]):
+    root: Union[Dog, Cat, Bird] = Field(..., discriminator='name', title='Animal')
+
+
+class Zoo(BaseModel):
+    animals: Optional[List[Animals]] = Field(default_factory=list, title='Animals')
+
+
+Bird.model_rebuild()
+Cat.model_rebuild()
+Dog.model_rebuild()
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/keep_model_order_field_references.py 0.45.0-1/tests/data/expected/main/jsonschema/keep_model_order_field_references.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/keep_model_order_field_references.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/keep_model_order_field_references.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  keep_model_order_field_references.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class Metadata(BaseModel):
+    title: Optional[str] = None
+
+
+class DescriptionType(BaseModel):
+    metadata: Optional[List[Metadata]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/long_description.py 0.45.0-1/tests/data/expected/main/jsonschema/long_description.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/long_description.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/long_description.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  long_description.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class LongDescription(BaseModel):
+    summary: Optional[str] = Field(None, description='summary for object')
+    description: Optional[str] = Field(
+        None,
+        description='datamodel-code-generator. This code generator creates pydantic model from an openapi file and others.',
+    )
+    multi_line: Optional[str] = Field(
+        None,
+        description='datamodel-code-generator\nThis code generator creates pydantic model from an openapi file and others.\n\n\nSupported source types\nOpenAPI 3 (YAML/JSON, OpenAPI Data Type)\nJSON Schema (JSON Schema Core/JSON Schema Validation)\nJSON/YAML/CSV Data (it will be converted to JSON Schema)\nPython dictionary (it will be converted to JSON Schema)',
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/long_description_wrap_string_literal.py 0.45.0-1/tests/data/expected/main/jsonschema/long_description_wrap_string_literal.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/long_description_wrap_string_literal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/long_description_wrap_string_literal.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,30 @@
+# generated by datamodel-codegen:
+#   filename:  long_description.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class LongDescription(BaseModel):
+    summary: Optional[str] = Field(None, description='summary for object')
+    description: Optional[str] = Field(
+        None,
+        description=(
+            'datamodel-code-generator. This code generator creates pydantic model from'
+            ' an openapi file and others.'
+        ),
+    )
+    multi_line: Optional[str] = Field(
+        None,
+        description=(
+            'datamodel-code-generator\nThis code generator creates pydantic model from'
+            ' an openapi file and others.\n\n\nSupported source types\nOpenAPI 3'
+            ' (YAML/JSON, OpenAPI Data Type)\nJSON Schema (JSON Schema Core/JSON Schema'
+            ' Validation)\nJSON/YAML/CSV Data (it will be converted to JSON'
+            ' Schema)\nPython dictionary (it will be converted to JSON Schema)'
+        ),
+    )
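
The two long_description fixtures carry identical content; this second
variant reflects the wrap-string-literal output style, where the long
description is split into adjacent string literals. Python concatenates those
at compile time, so the wrapped form is equivalent to the single-line one:

    # adjacent string literals merge at compile time
    assert 'datamodel-' 'code-generator' == 'datamodel-code-generator'
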
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/main_root_one_of/bar.py 0.45.0-1/tests/data/expected/main/jsonschema/main_root_one_of/bar.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/main_root_one_of/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/main_root_one_of/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  bar.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class JobRun(BaseModel):
+    enabled: Optional[bool] = Field(False, description='If Live Execution is Enabled.')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/main_root_one_of/foo.py 0.45.0-1/tests/data/expected/main/jsonschema/main_root_one_of/foo.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/main_root_one_of/foo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/main_root_one_of/foo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  foo.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class JobRun(BaseModel):
+    enabled: Optional[bool] = Field(False, description='If Live Execution is enabled')
+    resources: Optional[List[str]] = Field(
+        None, description='Resource full classname to register to extend any endpoints.'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/main_root_one_of/union.py 0.45.0-1/tests/data/expected/main/jsonschema/main_root_one_of/union.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/main_root_one_of/union.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/main_root_one_of/union.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,23 @@
+# generated by datamodel-codegen:
+#   filename:  union.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Union
+
+from pydantic import BaseModel, Field
+
+from . import bar, foo
+
+
+class ExecutionContext(BaseModel):
+    __root__: Union[foo.JobRun, bar.JobRun] = Field(
+        ..., description='Execution Configuration.'
+    )
+
+
+class App(BaseModel):
+    runtime: Optional[ExecutionContext] = Field(
+        None, description='Execution Configuration.'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/modular_default_enum_member/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/modular_default_enum_member/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/modular_default_enum_member/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/modular_default_enum_member/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular_default_enum_member
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/modular_default_enum_member/bar.py 0.45.0-1/tests/data/expected/main/jsonschema/modular_default_enum_member/bar.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/modular_default_enum_member/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/modular_default_enum_member/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  bar.json
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+from pydantic import BaseModel
+
+
+class Bar(BaseModel):
+    pass
+
+
+class LogLevels(Enum):
+    DEBUG = 'DEBUG'
+    INFO = 'INFO'
+    WARN = 'WARN'
+    ERROR = 'ERROR'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/modular_default_enum_member/foo.py 0.45.0-1/tests/data/expected/main/jsonschema/modular_default_enum_member/foo.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/modular_default_enum_member/foo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/modular_default_enum_member/foo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  foo.json
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+from . import bar
+from .nested_bar import bar as bar_1
+
+
+class Foo(BaseModel):
+    loggerLevel: Optional[bar.LogLevels] = bar.LogLevels.INFO
+    AnotherLoggerLevel: Optional[bar_1.LogLevels] = bar_1.LogLevels.ERROR
+    OtherLoggerLevels: Optional[List[bar_1.LogLevels]] = [
+        bar_1.LogLevels.INFO,
+        bar_1.LogLevels.ERROR,
+    ]
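
Here two distinct enums named LogLevels come from sibling modules, so the
generator disambiguates them with the import alias bar_1 and uses the enum
members themselves as field defaults. A single-module sketch of the same
default-enum-member behavior (hypothetical collapsed layout; works on both
pydantic v1 and v2):

    from enum import Enum
    from typing import Optional

    from pydantic import BaseModel

    class LogLevels(Enum):
        DEBUG = 'DEBUG'
        INFO = 'INFO'

    class Foo(BaseModel):
        loggerLevel: Optional[LogLevels] = LogLevels.INFO

    # the default is a real enum member, not the bare string 'INFO'
    assert Foo().loggerLevel is LogLevels.INFO
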
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/modular_default_enum_member/nested_bar/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/modular_default_enum_member/nested_bar/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/modular_default_enum_member/nested_bar/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/modular_default_enum_member/nested_bar/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular_default_enum_member
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/modular_default_enum_member/nested_bar/bar.py 0.45.0-1/tests/data/expected/main/jsonschema/modular_default_enum_member/nested_bar/bar.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/modular_default_enum_member/nested_bar/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/modular_default_enum_member/nested_bar/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  nested_bar/bar.json
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+from pydantic import BaseModel
+
+
+class NestedBar(BaseModel):
+    pass
+
+
+class LogLevels(Enum):
+    DEBUG = 'DEBUG'
+    INFO = 'INFO'
+    ERROR = 'ERROR'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/module_split_single/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/module_split_single/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/module_split_single/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/module_split_single/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  input.json
+
+from __future__ import annotations
+
+from .model import Model
+from .order import Order
+from .user import User
+
+__all__ = [
+    "Model",
+    "Order",
+    "User",
+]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/module_split_single/model.py 0.45.0-1/tests/data/expected/main/jsonschema/module_split_single/model.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/module_split_single/model.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/module_split_single/model.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  input.json
+
+from __future__ import annotations
+
+from typing import Any
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    __root__: Any
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/module_split_single/order.py 0.45.0-1/tests/data/expected/main/jsonschema/module_split_single/order.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/module_split_single/order.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/module_split_single/order.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  input.json
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .user import User
+
+
+class Order(BaseModel):
+    id: Optional[int] = None
+    user: Optional[User] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/module_split_single/user.py 0.45.0-1/tests/data/expected/main/jsonschema/module_split_single/user.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/module_split_single/user.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/module_split_single/user.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  input.json
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class User(BaseModel):
+    id: Optional[int] = None
+    name: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/msgspec_falsy_defaults.py 0.45.0-1/tests/data/expected/main/jsonschema/msgspec_falsy_defaults.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/msgspec_falsy_defaults.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/msgspec_falsy_defaults.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  msgspec_falsy_defaults.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from msgspec import Struct, UnsetType
+
+
+class Model(Struct):
+    zero_int: int | UnsetType = 0
+    empty_string: str | UnsetType = ''
+    false_bool: bool | UnsetType = False
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/msgspec_null_field.py 0.45.0-1/tests/data/expected/main/jsonschema/msgspec_null_field.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/msgspec_null_field.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/msgspec_null_field.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  msgspec_null_field.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from msgspec import UNSET, Struct, UnsetType
+
+
+class Model(Struct):
+    required_null: None
+    optional_null: UnsetType = UNSET
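
Together the two msgspec fixtures distinguish a field that must be present as
JSON null (required_null: None) from one that may be omitted entirely
(UnsetType defaulting to UNSET). A usage sketch, assuming msgspec is
installed:

    import msgspec
    from msgspec import UNSET, Struct, UnsetType

    class Model(Struct):
        required_null: None
        optional_null: UnsetType = UNSET

    m = msgspec.json.decode(b'{"required_null": null}', type=Model)
    assert m.required_null is None
    assert m.optional_null is UNSET  # the key was absent from the payload
    # UNSET fields are skipped on encode, so the omission round-trips
    assert msgspec.json.encode(m) == b'{"required_null":null}'
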
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/multiple_files/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/multiple_files/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  multiple_files
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files/file_a.py 0.45.0-1/tests/data/expected/main/jsonschema/multiple_files/file_a.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files/file_a.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/multiple_files/file_a.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  file_a.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from . import file_b
+
+
+class ModelA(BaseModel):
+    firstName: Optional[str] = None
+    modelB: Optional[file_b.ModelB] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files/file_b.py 0.45.0-1/tests/data/expected/main/jsonschema/multiple_files/file_b.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files/file_b.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/multiple_files/file_b.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  file_b.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class ModelB(BaseModel):
+    metadata: str
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files/file_c.py 0.45.0-1/tests/data/expected/main/jsonschema/multiple_files/file_c.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files/file_c.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/multiple_files/file_c.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  file_c.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from . import file_b
+
+
+class ModelC(BaseModel):
+    firstName: Optional[str] = None
+    modelB: Optional[file_b.ModelB] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files/file_d.py 0.45.0-1/tests/data/expected/main/jsonschema/multiple_files/file_d.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files/file_d.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/multiple_files/file_d.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  file_d.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from . import file_a
+
+
+class ModelD(BaseModel):
+    firstName: Optional[str] = None
+    modelA: Optional[file_a.ModelA] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_json_pointer/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/multiple_files_json_pointer/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_json_pointer/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/multiple_files_json_pointer/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  multiple_files_json_pointer
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_json_pointer/file_a.py 0.45.0-1/tests/data/expected/main/jsonschema/multiple_files_json_pointer/file_a.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_json_pointer/file_a.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/multiple_files_json_pointer/file_a.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  file_a.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Union
+
+from pydantic import BaseModel, Extra, Field
+
+from . import file_b
+
+
+class PersonA(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    name: Optional[str] = Field(None, title='name')
+    pet: Optional[Union[file_b.Cat, file_b.Dog]] = Field(None, title='pet')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_json_pointer/file_b.py 0.45.0-1/tests/data/expected/main/jsonschema/multiple_files_json_pointer/file_b.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_json_pointer/file_b.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/multiple_files_json_pointer/file_b.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,41 @@
+# generated by datamodel-codegen:
+#   filename:  file_b.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any
+
+from pydantic import BaseModel, Extra
+
+
+class Model(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    __root__: Any
+
+
+class Pets(BaseModel):
+    __root__: Any
+
+
+class PetType(Enum):
+    Cat = 'Cat'
+
+
+class Cat(BaseModel):
+    pet_type: PetType
+    hunts: bool
+    age: str
+
+
+class PetType1(Enum):
+    Dog = 'Dog'
+
+
+class Dog(BaseModel):
+    pet_type: PetType1
+    bark: bool
+    breed: str
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_json_pointer/file_c.py 0.45.0-1/tests/data/expected/main/jsonschema/multiple_files_json_pointer/file_c.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_json_pointer/file_c.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/multiple_files_json_pointer/file_c.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  file_c.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Union
+
+from pydantic import BaseModel, Extra, Field
+
+from . import file_b
+
+
+class PersonC(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    name: Optional[str] = Field(None, title='name')
+    pet: Optional[Union[file_b.Cat, file_b.Dog]] = Field(None, title='pet')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_self_ref/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/multiple_files_self_ref/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_self_ref/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/multiple_files_self_ref/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  multiple_files_self_ref
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_self_ref/base_test.py 0.45.0-1/tests/data/expected/main/jsonschema/multiple_files_self_ref/base_test.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_self_ref/base_test.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/multiple_files_self_ref/base_test.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  base_test.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    pass
+
+
+class Second(BaseModel):
+    __root__: str
+
+
+class First(BaseModel):
+    __root__: Second
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_self_ref/test.py 0.45.0-1/tests/data/expected/main/jsonschema/multiple_files_self_ref/test.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_self_ref/test.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/multiple_files_self_ref/test.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  test.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+from . import base_test
+
+
+class Model(BaseModel):
+    test_id: str = Field(..., description='test ID')
+    test_ip: base_test.First
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_self_ref_single.py 0.45.0-1/tests/data/expected/main/jsonschema/multiple_files_self_ref_single.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/multiple_files_self_ref_single.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/multiple_files_self_ref_single.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  test.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class Second(BaseModel):
+    __root__: str
+
+
+class First(BaseModel):
+    __root__: Second
+
+
+class Model(BaseModel):
+    test_id: str = Field(..., description='test ID')
+    test_ip: First
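
This single-file variant chains custom-root models: First wraps Second, which
wraps a plain str. A sketch of constructing the chain explicitly under
pydantic v1, where each layer validates its own root type:

    from pydantic import BaseModel

    class Second(BaseModel):
        __root__: str

    class First(BaseModel):
        __root__: Second

    # parse_obj on a custom-root model wraps the raw value automatically
    first = First(__root__=Second.parse_obj('10.0.0.1'))
    assert first.__root__.__root__ == '10.0.0.1'
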
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_all_of.py 0.45.0-1/tests/data/expected/main/jsonschema/nested_all_of.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_all_of.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/nested_all_of.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  nested_all_of.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    first: str
+    second: str
+    third: str
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_deep/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/nested_deep/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_deep/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/nested_deep/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  nested_person.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .empty_parent.nested import deep as deep_1
+from .nested import deep
+
+
+class NestedPerson(BaseModel):
+    nested_deep_childJson: Optional[deep.Json] = None
+    nested_deep_childAnother: Optional[deep.Another] = None
+    empty_parent_nested_deep_childJson: Optional[deep_1.Json] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_deep/empty_parent/nested/deep.py 0.45.0-1/tests/data/expected/main/jsonschema/nested_deep/empty_parent/nested/deep.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_deep/empty_parent/nested/deep.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/nested_deep/empty_parent/nested/deep.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  nested_person.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Json(BaseModel):
+    firstName: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_deep/nested/deep.py 0.45.0-1/tests/data/expected/main/jsonschema/nested_deep/nested/deep.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_deep/nested/deep.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/nested_deep/nested/deep.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  nested_person.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Json(BaseModel):
+    firstName: Optional[str] = None
+
+
+class Another(BaseModel):
+    firstName: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/_internal.py 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/_internal.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/_internal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/_internal.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,37 @@
+# generated by datamodel-codegen:
+#   filename:  _internal
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional, Union
+
+from pydantic import BaseModel, Field, conint
+
+from .definitions import food as food_1
+from .definitions import friends as friends_1
+from .definitions.drink import coffee, tea
+from .definitions.relative.animal.pet import pet as pet_1
+from .definitions.relative.animal.pet.pet import Pet
+
+
+class Robot(Pet):
+    friends: Optional[Person] = None
+    drink: Optional[coffee.Coffee] = None
+    food: Optional[food_1.Noodle] = None
+    pet: Optional[pet_1.Pet] = None
+
+
+class Person(BaseModel):
+    first_name: str = Field(..., description="The person's first name.")
+    last_name: str = Field(..., description="The person's last name.")
+    age: Optional[conint(ge=0)] = Field(None, description='Age in years.')
+    pets: Optional[List[pet_1.Pet]] = None
+    friends: Optional[friends_1.Friends] = None
+    robot: Optional[Robot] = None
+    comment: None = None
+    drink: Optional[List[Union[coffee.Coffee, tea.Tea]]] = None
+    food: Optional[List[Union[food_1.Noodle, food_1.Soup]]] = None
+
+
+Person.update_forward_refs()
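
In _internal.py the generator emits Robot before Person even though Robot
refers to Person; postponed annotation evaluation keeps the reference as a
string, and update_forward_refs() binds it once Person exists. A minimal
pydantic v1 sketch of that mechanism:

    from __future__ import annotations

    from typing import Optional

    from pydantic import BaseModel

    class Robot(BaseModel):
        friends: Optional[Person] = None  # forward reference, resolved below

    class Person(BaseModel):
        robot: Optional[Robot] = None

    Robot.update_forward_refs()  # binds Person now that it is defined
    robot = Robot(friends=Person())
    assert isinstance(robot.friends, Person)
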
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/drink/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/drink/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/drink/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/drink/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/drink/coffee.py 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/drink/coffee.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/drink/coffee.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/drink/coffee.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  definitions/drink/coffee.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class Coffee(Enum):
+    Black = 'Black'
+    Espresso = 'Espresso'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/drink/tea.py 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/drink/tea.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/drink/tea.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/drink/tea.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  definitions/drink/tea.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class Tea(Enum):
+    Oolong = 'Oolong'
+    Green = 'Green'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/food.py 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/food.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/food.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/food.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  definitions/food.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class Noodle(Enum):
+    ramen = 'ramen'
+    spaghetti = 'spaghetti'
+
+
+class Soup(Enum):
+    bean = 'bean'
+    mushroom = 'mushroom'
+    tomato = 'tomato'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/friends.py 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/friends.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/friends.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/friends.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+# generated by datamodel-codegen:
+#   filename:  definitions/friends.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional, Union
+
+from pydantic import BaseModel, Extra, Field
+
+from . import food as food_1
+
+
+class Friend(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    name: str = Field(..., example='John Doe')
+    phone_number: Optional[str] = Field(None, example='(555) 555-1234')
+    food: Optional[List[Union[food_1.Noodle, food_1.Soup]]] = None
+
+
+class Friends(BaseModel):
+    __root__: List[Friend] = Field(..., title='Friends')
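
Friends here is a custom-root model over a list, so the whole payload is a
JSON array rather than an object. A sketch of feeding it one, assuming
pydantic v1:

    from typing import List

    from pydantic import BaseModel

    class Friend(BaseModel):
        name: str

    class Friends(BaseModel):
        __root__: List[Friend]

    # parse_obj wraps the bare list into the root field
    friends = Friends.parse_obj([{'name': 'John Doe'}])
    assert friends.__root__[0].name == 'John Doe'
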
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/machine/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/machine/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/machine/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/machine/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/machine/robot.py 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/machine/robot.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/machine/robot.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/machine/robot.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from ..._internal import Robot
+
+__all__ = ["Robot"]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/fur.py 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/fur.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/fur.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/fur.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  definitions/relative/animal/fur.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class Fur(Enum):
+    Short_hair = 'Short hair'
+    Long_hair = 'Long hair'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/pet/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/pet/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/pet/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/pet/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/pet/pet.py 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/pet/pet.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/pet/pet.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/definitions/relative/animal/pet/pet.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  definitions/relative/animal/pet/pet.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import fur as fur_1
+
+
+class Pet(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
+    fur: Optional[fur_1.Fur] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/person.py 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/person.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_directory/person.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/nested_directory/person.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from ._internal import Person
+
+__all__ = ["Person"]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_json_pointer.py 0.45.0-1/tests/data/expected/main/jsonschema/nested_json_pointer.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_json_pointer.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/nested_json_pointer.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,68 @@
+# generated by datamodel-codegen:
+#   filename:  nested_json_pointer.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, Optional, Union
+
+from pydantic import BaseModel, Extra, Field
+
+
+class CatBreed(BaseModel):
+    __root__: Any
+
+
+class DogBreed(BaseModel):
+    __root__: Any
+
+
+class Pets(BaseModel):
+    __root__: Any
+
+
+class PetType(Enum):
+    Cat = 'Cat'
+
+
+class PetType1(Enum):
+    Dog = 'Dog'
+
+
+class C1(BaseModel):
+    hunts: Optional[bool] = None
+    age: Optional[str] = None
+
+
+class C2(BaseModel):
+    hunts: Optional[bool] = None
+    age: Optional[str] = None
+
+
+class D1(BaseModel):
+    bark: Optional[bool] = None
+    age: Optional[str] = None
+
+
+class D2(BaseModel):
+    hunts: Optional[bool] = None
+    age: Optional[str] = None
+
+
+class Cat(BaseModel):
+    pet_type: PetType
+    breed: Optional[Union[C1, C2]] = Field(None, title='breed')
+
+
+class Dog(BaseModel):
+    pet_type: PetType1
+    breed: Union[D1, D2] = Field(..., title='breed')
+
+
+class Person(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    name: Optional[str] = Field(None, title='name')
+    pet: Optional[Union[Cat, Dog]] = Field(None, title='pet')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_skip/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/nested_skip/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_skip/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/nested_skip/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  nested_skip.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class NestedSkip(BaseModel):
+    pass
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_skip/a/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/nested_skip/a/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_skip/a/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/nested_skip/a/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  nested_skip.json
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_skip/a/b/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/nested_skip/a/b/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_skip/a/b/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/nested_skip/a/b/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  nested_skip.json
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_skip/a/b/c/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/nested_skip/a/b/c/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_skip/a/b/c/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/nested_skip/a/b/c/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  nested_skip.json
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nested_skip/a/b/c/d.py 0.45.0-1/tests/data/expected/main/jsonschema/nested_skip/a/b/c/d.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nested_skip/a/b/c/d.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/nested_skip/a/b/c/d.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  nested_skip.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class E(BaseModel):
+    example1: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/null.py 0.45.0-1/tests/data/expected/main/jsonschema/null.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/null.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/null.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  null.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Optional, Union
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    null: None = None
+    nullableString: Optional[str] = None
+    nullableNumber: Optional[Union[float, int]] = None
+    any: Optional[Any] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/null_and_array.py 0.45.0-1/tests/data/expected/main/jsonschema/null_and_array.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/null_and_array.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/null_and_array.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  null_and_array.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, List, Optional
+
+from pydantic import BaseModel
+
+
+class MyObjItem(BaseModel):
+    items: Optional[List[Any]]
+
+
+class Model(BaseModel):
+    my_obj: List[MyObjItem]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/null_and_array_v2.py 0.45.0-1/tests/data/expected/main/jsonschema/null_and_array_v2.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/null_and_array_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/null_and_array_v2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  null_and_array.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, List, Optional
+
+from pydantic import BaseModel
+
+
+class MyObjItem(BaseModel):
+    items: Optional[List[Any]] = None
+
+
+class Model(BaseModel):
+    my_obj: List[MyObjItem]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nullable_any_of.py 0.45.0-1/tests/data/expected/main/jsonschema/nullable_any_of.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nullable_any_of.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/nullable_any_of.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,31 @@
+# generated by datamodel-codegen:
+#   filename:  nullable_any_of.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Extra, Field
+
+
+class Config(BaseModel):
+    __root__: str = Field(..., description='d2', min_length=1, title='t2')
+
+
+class In(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    input_dataset_path: Optional[str] = Field(
+        None, description='d1', min_length=1, title='Path to the input dataset'
+    )
+    config: Optional[Config] = None
+
+
+class ValidatingSchemaId1(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    in_: Optional[In] = Field(None, alias='in')
+    n1: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nullable_any_of_use_union_operator.py 0.45.0-1/tests/data/expected/main/jsonschema/nullable_any_of_use_union_operator.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nullable_any_of_use_union_operator.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/nullable_any_of_use_union_operator.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,29 @@
+# generated by datamodel-codegen:
+#   filename:  nullable_any_of.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Extra, Field
+
+
+class Config(BaseModel):
+    __root__: str = Field(..., description='d2', min_length=1, title='t2')
+
+
+class In(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    input_dataset_path: str | None = Field(
+        None, description='d1', min_length=1, title='Path to the input dataset'
+    )
+    config: Config | None = None
+
+
+class ValidatingSchemaId1(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    in_: In | None = Field(None, alias='in')
+    n1: int | None = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/nullable_object.py 0.45.0-1/tests/data/expected/main/jsonschema/nullable_object.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/nullable_object.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/nullable_object.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,23 @@
+# generated by datamodel-codegen:
+#   filename:  nullable_object.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Dict, Optional
+
+from pydantic import BaseModel, Extra, constr
+
+
+class Network(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    name: Optional[str] = None
+
+
+class Model(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    networks: Dict[constr(regex=r'^[a-zA-Z0-9._-]+$'), Optional[Network]]
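
The nullable_object fixture maps pattern-constrained keys to values that may
be either an object or JSON null, which pydantic v1 spells as
Dict[constr(regex=...), Optional[Network]]. A usage sketch (v1 API; v2
renamed regex to pattern):

    from typing import Dict, Optional

    from pydantic import BaseModel, constr

    class Network(BaseModel):
        name: Optional[str] = None

    class Model(BaseModel):
        networks: Dict[constr(regex=r'^[a-zA-Z0-9._-]+$'), Optional[Network]]

    # keys are validated against the pattern; values may be null
    m = Model.parse_obj({'networks': {'backend-1': None, 'web': {'name': 'w'}}})
    assert m.networks['backend-1'] is None
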
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/object_has_one_of.py 0.45.0-1/tests/data/expected/main/jsonschema/object_has_one_of.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/object_has_one_of.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/object_has_one_of.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,61 @@
+# generated by datamodel-codegen:
+#   filename:  object_has_one_of.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Union
+
+from pydantic import BaseModel, Extra, Field
+
+
+class Field1(Enum):
+    response_1 = 'response_1'
+
+
+class Field2(Enum):
+    response_a = 'response_a'
+
+
+class V2Test1(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    field_1: Field1
+    field_2: Field2
+
+
+class Field11(Enum):
+    response_2 = 'response_2'
+
+
+class Field21(Enum):
+    response_b = 'response_b'
+
+
+class V2Test2(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    field_1: Field11
+    field_2: Field21
+
+
+class Field22(Enum):
+    response_c = 'response_c'
+
+
+class V2Test3(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    field_1: Field11
+    field_2: Field22
+
+
+class V2Test(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    __root__: Union[V2Test1, Union[V2Test2, V2Test3]] = Field(..., title='v2_test')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/one_of_with_sub_schema_array_item.py 0.45.0-1/tests/data/expected/main/jsonschema/one_of_with_sub_schema_array_item.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/one_of_with_sub_schema_array_item.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/one_of_with_sub_schema_array_item.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  one_of_with_sub_schema_array_item.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional, Union
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class SpatialPlan(BaseModel):
+    officialDocument: Optional[Union[str, List[AnyUrl]]] = Field(
+        None,
+        description='Link to the official documents that relate to the spatial plan.',
+        title='officialDocument',
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/oneof_const_enum.py 0.45.0-1/tests/data/expected/main/jsonschema/oneof_const_enum.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/oneof_const_enum.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/oneof_const_enum.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  oneof_const_enum.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class NodejsMode(Enum):
+    npm = 'npm'
+    yarn = 'yarn'
+    npm_ci = 'npm_ci'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/oneof_const_enum_bool.py 0.45.0-1/tests/data/expected/main/jsonschema/oneof_const_enum_bool.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/oneof_const_enum_bool.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/oneof_const_enum_bool.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  oneof_const_enum_bool.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class BooleanFlag(Enum):
+    boolean_True = True
+    boolean_False = False
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/oneof_const_enum_float.py 0.45.0-1/tests/data/expected/main/jsonschema/oneof_const_enum_float.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/oneof_const_enum_float.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/oneof_const_enum_float.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  oneof_const_enum_float.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class Ratio(Enum):
+    number_0_5 = 0.5
+    number_1_0 = 1.0
+    number_1_5 = 1.5
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/oneof_const_enum_infer_type.py 0.45.0-1/tests/data/expected/main/jsonschema/oneof_const_enum_infer_type.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/oneof_const_enum_infer_type.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/oneof_const_enum_infer_type.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  oneof_const_enum_infer_type.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class InferredType(Enum):
+    value1 = 'value1'
+    value2 = 'value2'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/oneof_const_enum_int.py 0.45.0-1/tests/data/expected/main/jsonschema/oneof_const_enum_int.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/oneof_const_enum_int.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/oneof_const_enum_int.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  oneof_const_enum_int.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import IntEnum
+
+
+class StatusCode(IntEnum):
+    integer_200 = 200
+    integer_404 = 404
+    integer_500 = 500
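
Because the const/enum values here are all integers, the generator derives
the class from IntEnum instead of Enum, so members compare equal to plain
ints and round-trip through JSON without casts:

    from enum import IntEnum

    class StatusCode(IntEnum):
        integer_200 = 200
        integer_404 = 404
        integer_500 = 500

    assert StatusCode(404) is StatusCode.integer_404
    assert StatusCode.integer_200 == 200  # IntEnum compares equal to int
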
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/oneof_const_enum_literal.py 0.45.0-1/tests/data/expected/main/jsonschema/oneof_const_enum_literal.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/oneof_const_enum_literal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/oneof_const_enum_literal.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  oneof_const_enum.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal
+
+from pydantic import BaseModel, Field
+
+
+class NodejsMode(BaseModel):
+    __root__: Literal['npm', 'yarn', 'npm_ci'] = Field(..., title='NodeJS mode')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/oneof_const_enum_nested.py 0.45.0-1/tests/data/expected/main/jsonschema/oneof_const_enum_nested.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/oneof_const_enum_nested.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/oneof_const_enum_nested.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  oneof_const_enum_nested.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class Mode(Enum):
+    fast = 'fast'
+    slow = 'slow'
+
+
+class Mode1(Enum):
+    a = 'a'
+    b = 'b'
+
+
+class Config(BaseModel):
+    mode: Optional[Mode] = Field(None, title='Mode')
+    modes: Optional[List[Mode1]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/oneof_const_enum_nested_literal.py 0.45.0-1/tests/data/expected/main/jsonschema/oneof_const_enum_nested_literal.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/oneof_const_enum_nested_literal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/oneof_const_enum_nested_literal.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  oneof_const_enum_nested.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Literal, Optional
+
+from pydantic import BaseModel, Field
+
+
+class Config(BaseModel):
+    mode: Optional[Literal['fast', 'slow']] = Field(None, title='Mode')
+    modes: Optional[List[Literal['a', 'b']]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/oneof_const_enum_nullable.py 0.45.0-1/tests/data/expected/main/jsonschema/oneof_const_enum_nullable.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/oneof_const_enum_nullable.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/oneof_const_enum_nullable.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  oneof_const_enum_nullable.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class OptionalModeEnum(Enum):
+    enabled = 'enabled'
+    disabled = 'disabled'
+
+
+class OptionalMode(BaseModel):
+    __root__: Optional[OptionalModeEnum] = Field(None, title='Optional Mode')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/oneof_const_enum_object.py 0.45.0-1/tests/data/expected/main/jsonschema/oneof_const_enum_object.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/oneof_const_enum_object.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/oneof_const_enum_object.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  oneof_const_enum_object.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class ObjectConst(Enum):
+    value_0 = {'key': 'value1'}
+    value_1 = {'key': 'value2'}
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/oneof_const_enum_type_list.py 0.45.0-1/tests/data/expected/main/jsonschema/oneof_const_enum_type_list.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/oneof_const_enum_type_list.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/oneof_const_enum_type_list.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  oneof_const_enum_type_list.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class ModeWithNullEnum(Enum):
+    on = 'on'
+    off = 'off'
+
+
+class ModeWithNull(BaseModel):
+    __root__: Optional[ModeWithNullEnum] = Field(None, title='Mode With Null')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/oneof_const_enum_type_list_no_null.py 0.45.0-1/tests/data/expected/main/jsonschema/oneof_const_enum_type_list_no_null.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/oneof_const_enum_type_list_no_null.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/oneof_const_enum_type_list_no_null.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  oneof_const_enum_type_list_no_null.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class MultiType(Enum):
+    value1 = 'value1'
+    value2 = 'value2'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/oneof_const_mixed_with_ref.py 0.45.0-1/tests/data/expected/main/jsonschema/oneof_const_mixed_with_ref.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/oneof_const_mixed_with_ref.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/oneof_const_mixed_with_ref.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  oneof_const_mixed_with_ref.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class SomeType(BaseModel):
+    name: Optional[str] = None
+
+
+class MixedUnion(BaseModel):
+    __root__: Union[str, SomeType] = Field(..., title='MixedUnion')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/oneof_const_with_properties.py 0.45.0-1/tests/data/expected/main/jsonschema/oneof_const_with_properties.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/oneof_const_with_properties.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/oneof_const_with_properties.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  oneof_const_with_properties.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class ConstWithProps1(BaseModel):
+    invalid: Optional[Any] = None
+
+
+class ConstWithProps(BaseModel):
+    __root__: Union[ConstWithProps1, str] = Field(..., title='ConstWithProps')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/pattern.py 0.45.0-1/tests/data/expected/main/jsonschema/pattern.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/pattern.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/pattern.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  pattern.json
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, constr
+
+
+class Info(BaseModel):
+    hostName: Optional[
+        constr(
+            regex=r'^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]{0,61}[A-Za-z0-9])\Z'
+        )
+    ] = None
+    arn: Optional[
+        constr(regex=r'(^arn:([^:]*):([^:]*):([^:]*):(|\*|[\d]{12}):(.+)$)|^\*$')
+    ] = None
+    tel: Optional[constr(regex=r'^(\([0-9]{3}\))?[0-9]{3}-[0-9]{4}$')] = None
+    comment: Optional[constr(regex=r'[^\b\f\n\r\t\\a+.?\'"|()]+$')] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/pattern_properties.py 0.45.0-1/tests/data/expected/main/jsonschema/pattern_properties.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/pattern_properties.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/pattern_properties.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  pattern_properties.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Dict, Optional
+
+from pydantic import BaseModel, constr
+
+
+class Bar(BaseModel):
+    name: Optional[str] = None
+
+
+class Foo(BaseModel):
+    bar: Dict[constr(regex=r'^([a-zA-Z_][a-zA-Z0-9_]*)$'), Bar]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/pattern_properties_by_reference.py 0.45.0-1/tests/data/expected/main/jsonschema/pattern_properties_by_reference.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/pattern_properties_by_reference.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/pattern_properties_by_reference.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,34 @@
+# generated by datamodel-codegen:
+#   filename:  pattern_properties_by_reference.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Dict, Optional
+
+from pydantic import BaseModel, Extra, Field, constr
+
+
+class Stt(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    timeout: Optional[float] = Field(None, title='STT Timeout')
+
+
+class TextResponse(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    __root__: Dict[constr(regex=r'^[a-z]{1}[0-9]{1}$'), Any]
+
+
+class SomeschemaSchema(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    KeyWithExplicitPatternProperties: Optional[
+        Dict[constr(regex=r'^[a-z]{1}[0-9]{1}$'), Any]
+    ] = None
+    KeyWithPatternPropertiesByReference: Optional[TextResponse] = None
+    SomeOtherBoringReference: Optional[Stt] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/pattern_properties_field_constraints.py 0.45.0-1/tests/data/expected/main/jsonschema/pattern_properties_field_constraints.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/pattern_properties_field_constraints.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/pattern_properties_field_constraints.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  pattern_properties.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Dict, Optional
+
+from pydantic import BaseModel
+
+
+class Bar(BaseModel):
+    name: Optional[str] = None
+
+
+class Foo(BaseModel):
+    bar: Dict[str, Bar]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/pydantic_v2_model_rebuild_inheritance.py 0.45.0-1/tests/data/expected/main/jsonschema/pydantic_v2_model_rebuild_inheritance.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/pydantic_v2_model_rebuild_inheritance.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/pydantic_v2_model_rebuild_inheritance.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  pydantic_v2_model_rebuild_inheritance.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, RootModel
+
+
+class Base(BaseModel):
+    next: Optional[Base] = None
+
+
+class Derived(Base):
+    value: Optional[str] = None
+
+
+class Model(RootModel[Derived]):
+    root: Derived
+
+
+Base.model_rebuild()
+Derived.model_rebuild()
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/ref_type_has_null.py 0.45.0-1/tests/data/expected/main/jsonschema/ref_type_has_null.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/ref_type_has_null.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/ref_type_has_null.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  ref_type_has_null.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class NullableString(BaseModel):
+    __root__: str | None
+
+
+class NonNullableString(BaseModel):
+    __root__: str
+
+
+class NullOnly(BaseModel):
+    __root__: None
+
+
+class Model(BaseModel):
+    nullableRef: NullableString | None
+    nonNullableRef: NonNullableString
+    nullOnlyRef: NullOnly | None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/ref_with_additional_keywords/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/ref_with_additional_keywords/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/ref_with_additional_keywords/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/ref_with_additional_keywords/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  ref_with_additional_keywords
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/ref_with_additional_keywords/commons_schema.py 0.45.0-1/tests/data/expected/main/jsonschema/ref_with_additional_keywords/commons_schema.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/ref_with_additional_keywords/commons_schema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/ref_with_additional_keywords/commons_schema.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  commons.schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, List
+
+from pydantic import Field, RootModel
+
+
+class Commons(RootModel[Any]):
+    root: Any = Field(..., description='Commons objects', title='Commons')
+
+
+class DefaultArray(RootModel[List[Any]]):
+    root: List[Any] = Field(..., max_length=100, min_length=1)
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/ref_with_additional_keywords/products_schema.py 0.45.0-1/tests/data/expected/main/jsonschema/ref_with_additional_keywords/products_schema.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/ref_with_additional_keywords/products_schema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/ref_with_additional_keywords/products_schema.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  products.schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List
+
+from pydantic import Field, RootModel
+
+
+class Products(RootModel[List[str]]):
+    root: List[str] = Field(
+        ...,
+        description='The products in the catalog',
+        max_length=100,
+        min_length=1,
+        title='Products',
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/removed_parent_class.py 0.45.0-1/tests/data/expected/main/jsonschema/removed_parent_class.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/removed_parent_class.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/removed_parent_class.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  pet.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    name: str
+    age: int
+
+
+class Model(BaseModel):
+    Pet: Pet
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field/referenced.py 0.45.0-1/tests/data/expected/main/jsonschema/require_referenced_field/referenced.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field/referenced.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/require_referenced_field/referenced.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  referenced.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from datetime import datetime
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    some_optional_property: Optional[str] = None
+    some_optional_typed_property: Optional[datetime] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field/required.py 0.45.0-1/tests/data/expected/main/jsonschema/require_referenced_field/required.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field/required.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/require_referenced_field/required.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  required.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from datetime import datetime
+
+from .referenced import Model as Model_1
+
+
+class Model(Model_1):
+    some_optional_property: str
+    some_optional_typed_property: datetime
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field_msgspec/referenced.py 0.45.0-1/tests/data/expected/main/jsonschema/require_referenced_field_msgspec/referenced.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field_msgspec/referenced.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/require_referenced_field_msgspec/referenced.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  referenced.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Union
+
+from msgspec import UNSET, Struct, UnsetType
+
+
+class Model(Struct):
+    some_optional_property: Union[str, UnsetType] = UNSET
+    some_optional_typed_property: Union[str, UnsetType] = UNSET
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field_msgspec/required.py 0.45.0-1/tests/data/expected/main/jsonschema/require_referenced_field_msgspec/required.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field_msgspec/required.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/require_referenced_field_msgspec/required.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  required.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from .referenced import Model as Model_1
+
+
+class Model(Model_1):
+    some_optional_property: str
+    some_optional_typed_property: str
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field_naivedatetime/referenced.py 0.45.0-1/tests/data/expected/main/jsonschema/require_referenced_field_naivedatetime/referenced.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field_naivedatetime/referenced.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/require_referenced_field_naivedatetime/referenced.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  referenced.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, NaiveDatetime
+
+
+class Model(BaseModel):
+    some_optional_property: Optional[str] = None
+    some_optional_typed_property: Optional[NaiveDatetime] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field_naivedatetime/required.py 0.45.0-1/tests/data/expected/main/jsonschema/require_referenced_field_naivedatetime/required.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field_naivedatetime/required.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/require_referenced_field_naivedatetime/required.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  required.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import NaiveDatetime
+
+from .referenced import Model as Model_1
+
+
+class Model(Model_1):
+    some_optional_property: str
+    some_optional_typed_property: NaiveDatetime
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field_pydantic_v2/referenced.py 0.45.0-1/tests/data/expected/main/jsonschema/require_referenced_field_pydantic_v2/referenced.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field_pydantic_v2/referenced.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/require_referenced_field_pydantic_v2/referenced.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  referenced.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import AwareDatetime, BaseModel
+
+
+class Model(BaseModel):
+    some_optional_property: Optional[str] = None
+    some_optional_typed_property: Optional[AwareDatetime] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field_pydantic_v2/required.py 0.45.0-1/tests/data/expected/main/jsonschema/require_referenced_field_pydantic_v2/required.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/require_referenced_field_pydantic_v2/required.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/require_referenced_field_pydantic_v2/required.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  required.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import AwareDatetime
+
+from .referenced import Model as Model_1
+
+
+class Model(Model_1):
+    some_optional_property: str
+    some_optional_typed_property: AwareDatetime
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/required_and_any_of_required.py 0.45.0-1/tests/data/expected/main/jsonschema/required_and_any_of_required.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/required_and_any_of_required.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/required_and_any_of_required.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  required_and_any_of_required.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Union
+
+from pydantic import BaseModel
+
+
+class Foo(BaseModel):
+    bar: int
+    baz: int
+    qux: Optional[int] = None
+
+
+class Foo1(BaseModel):
+    bar: int
+    baz: Optional[int] = None
+    qux: int
+
+
+class Model(BaseModel):
+    foo: Union[Foo, Foo1]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reserved_field_name_schema_dataclass.py 0.45.0-1/tests/data/expected/main/jsonschema/reserved_field_name_schema_dataclass.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reserved_field_name_schema_dataclass.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reserved_field_name_schema_dataclass.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  reserved_field_name_schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+
+
+@dataclass
+class ModelWithSchemaField:
+    schema: str
+    name: str
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reserved_field_name_schema_pydantic.py 0.45.0-1/tests/data/expected/main/jsonschema/reserved_field_name_schema_pydantic.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reserved_field_name_schema_pydantic.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reserved_field_name_schema_pydantic.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  reserved_field_name_schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class ModelWithSchemaField(BaseModel):
+    schema_: str = Field(
+        ...,
+        alias='schema',
+        description="A field named 'schema' that should not be renamed in non-Pydantic models",
+    )
+    name: str
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reserved_field_name_schema_typed_dict.py 0.45.0-1/tests/data/expected/main/jsonschema/reserved_field_name_schema_typed_dict.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reserved_field_name_schema_typed_dict.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reserved_field_name_schema_typed_dict.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  reserved_field_name_schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import TypedDict
+
+
+class ModelWithSchemaField(TypedDict):
+    schema: str
+    name: str
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reserved_property.py 0.45.0-1/tests/data/expected/main/jsonschema/reserved_property.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reserved_property.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reserved_property.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  reserved_property.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class ReservedNames(BaseModel):
+    json_: str = Field(..., alias='json')
+    schema_: str = Field(..., alias='schema')
+    dict_: int = Field(..., alias='dict')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  reuse_scope_tree
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree/schema_a.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree/schema_a.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree/schema_a.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree/schema_a.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  reuse_scope_tree
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .shared import SharedModel as SharedModel_1
+
+
+class SharedModel(SharedModel_1):
+    pass
+
+
+class Model(BaseModel):
+    data: Optional[SharedModel] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree/schema_b.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree/schema_b.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree/schema_b.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree/schema_b.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  schema_b.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from . import shared
+
+
+class Model(BaseModel):
+    info: Optional[shared.SharedModel] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree/shared.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree/shared.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree/shared.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree/shared.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  shared.py
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class SharedModel(BaseModel):
+    id: Optional[int] = None
+    name: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_branch/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_branch/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_branch/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_branch/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  reuse_scope_tree_branch
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_branch/aaa_first.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_branch/aaa_first.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_branch/aaa_first.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_branch/aaa_first.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  aaa_first.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class UniqueModelA(BaseModel):
+    name: Optional[str] = None
+
+
+class Model(BaseModel):
+    data: Optional[UniqueModelA] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_branch/bbb_second.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_branch/bbb_second.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_branch/bbb_second.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_branch/bbb_second.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  bbb_second.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class UniqueModelB(BaseModel):
+    value: Optional[int] = None
+
+
+class Model(BaseModel):
+    info: Optional[UniqueModelB] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_branch/shared.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_branch/shared.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_branch/shared.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_branch/shared.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  shared.py
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class SharedModel(BaseModel):
+    id: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_branch/yyy_duplicate.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_branch/yyy_duplicate.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_branch/yyy_duplicate.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_branch/yyy_duplicate.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  reuse_scope_tree_branch
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .shared import SharedModel as SharedModel_1
+
+
+class SharedModel(SharedModel_1):
+    pass
+
+
+class Model(BaseModel):
+    data: Optional[SharedModel] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_branch/zzz_last.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_branch/zzz_last.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_branch/zzz_last.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_branch/zzz_last.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  zzz_last.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from . import shared
+
+
+class Model(BaseModel):
+    item: Optional[shared.SharedModel] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_dataclass/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_dataclass/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_dataclass/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_dataclass/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  reuse_scope_tree_dataclass
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_dataclass/schema_a.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_dataclass/schema_a.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_dataclass/schema_a.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_dataclass/schema_a.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  reuse_scope_tree_dataclass
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Optional
+
+from .shared import SharedModel as SharedModel_1
+
+
+@dataclass
+class SharedModel(SharedModel_1):
+    pass
+
+
+@dataclass
+class Model:
+    data: Optional[SharedModel] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_dataclass/schema_b.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_dataclass/schema_b.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_dataclass/schema_b.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_dataclass/schema_b.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  schema_b.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Optional
+
+from . import shared
+
+
+@dataclass
+class Model:
+    info: Optional[shared.SharedModel] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_dataclass/shared.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_dataclass/shared.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_dataclass/shared.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_dataclass/shared.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  shared.py
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Optional
+
+
+@dataclass
+class SharedModel:
+    id: Optional[int] = None
+    name: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_enum/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_enum/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_enum/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_enum/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  reuse_scope_tree_enum
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_enum/schema_a.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_enum/schema_a.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_enum/schema_a.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_enum/schema_a.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  schema_a.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from . import shared
+
+
+class Model(BaseModel):
+    status: Optional[shared.Status] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_enum/schema_b.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_enum/schema_b.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_enum/schema_b.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_enum/schema_b.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  schema_b.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from . import shared
+
+
+class Model(BaseModel):
+    state: Optional[shared.Status] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_enum/shared.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_enum/shared.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_enum/shared.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_enum/shared.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  shared.py
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class Status(Enum):
+    active = 'active'
+    inactive = 'inactive'
+    pending = 'pending'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_multi/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_multi/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_multi/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_multi/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  reuse_scope_tree_multi
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_multi/schema_a.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_multi/schema_a.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_multi/schema_a.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_multi/schema_a.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+# generated by datamodel-codegen:
+#   filename:  schema_a.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .shared import SharedModel2 as SharedModel2_1
+
+
+class ModelA(BaseModel):
+    name: Optional[str] = None
+
+
+class SharedModel2(SharedModel2_1):
+    pass
+
+
+class Model(BaseModel):
+    data: Optional[ModelA] = None
+    info: Optional[SharedModel2] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_multi/schema_b.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_multi/schema_b.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_multi/schema_b.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_multi/schema_b.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  reuse_scope_tree_multi
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .shared import SharedModel as SharedModel_1
+
+
+class SharedModel(SharedModel_1):
+    pass
+
+
+class Model(BaseModel):
+    info: Optional[SharedModel] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_multi/schema_c.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_multi/schema_c.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_multi/schema_c.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_multi/schema_c.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  reuse_scope_tree_multi
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from . import shared
+from .shared import SharedModel as SharedModel_1
+
+
+class SharedModel(SharedModel_1):
+    pass
+
+
+class Model(BaseModel):
+    data: Optional[SharedModel] = None
+    extra: Optional[shared.SharedModel2] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_multi/schema_d.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_multi/schema_d.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_multi/schema_d.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_multi/schema_d.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  schema_d.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from . import shared
+
+
+class Model(BaseModel):
+    item: Optional[shared.SharedModel] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_multi/shared.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_multi/shared.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_multi/shared.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_multi/shared.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  shared.py
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class SharedModel(BaseModel):
+    id: Optional[int] = None
+
+
+class SharedModel2(BaseModel):
+    value: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_no_conflict_dir/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_no_conflict_dir/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_no_conflict_dir/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_no_conflict_dir/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  reuse_scope_tree_no_conflict_dir
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_no_conflict_dir/other.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_no_conflict_dir/other.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_no_conflict_dir/other.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_no_conflict_dir/other.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  other.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class ModelB(BaseModel):
+    id: Optional[int] = None
+
+
+class Model(BaseModel):
+    info: Optional[ModelB] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_no_conflict_dir/shared/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_no_conflict_dir/shared/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_no_conflict_dir/shared/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_no_conflict_dir/shared/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  reuse_scope_tree_no_conflict_dir
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_no_conflict_dir/shared/schema.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_no_conflict_dir/shared/schema.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_no_conflict_dir/shared/schema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_no_conflict_dir/shared/schema.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  shared/schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class ModelA(BaseModel):
+    name: Optional[str] = None
+
+
+class Model(BaseModel):
+    data: Optional[ModelA] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_no_dup/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_no_dup/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_no_dup/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_no_dup/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  reuse_scope_tree_no_dup
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_no_dup/schema_a.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_no_dup/schema_a.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_no_dup/schema_a.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_no_dup/schema_a.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  schema_a.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    name: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_no_dup/schema_b.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_no_dup/schema_b.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_no_dup/schema_b.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_no_dup/schema_b.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  schema_b.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    id: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_self_ref/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_self_ref/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_self_ref/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_self_ref/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  reuse_scope_tree_self_ref
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_self_ref/schema_a.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_self_ref/schema_a.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_self_ref/schema_a.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_self_ref/schema_a.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,22 @@
+# generated by datamodel-codegen:
+#   filename:  reuse_scope_tree_self_ref
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .shared import Node as Node_1
+
+
+class Node(Node_1):
+    pass
+
+
+class Model(BaseModel):
+    node: Optional[Node] = None
+
+
+Node.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_self_ref/schema_b.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_self_ref/schema_b.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_self_ref/schema_b.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_self_ref/schema_b.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  schema_b.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from . import shared
+
+
+class Model(BaseModel):
+    tree: Optional[shared.Node] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_self_ref/shared.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_self_ref/shared.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_self_ref/shared.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_self_ref/shared.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  shared.py
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Node(BaseModel):
+    id: Optional[int] = None
+    child: Optional[Node] = None
+
+
+Node.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_typeddict/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_typeddict/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_typeddict/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_typeddict/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  reuse_scope_tree_typeddict
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_typeddict/schema_a.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_typeddict/schema_a.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_typeddict/schema_a.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_typeddict/schema_a.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  schema_a.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import TypedDict
+
+from typing_extensions import NotRequired
+
+from . import shared
+
+
+class Model(TypedDict):
+    data: NotRequired[shared.SharedModel]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_typeddict/schema_b.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_typeddict/schema_b.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_typeddict/schema_b.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_typeddict/schema_b.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  schema_b.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import TypedDict
+
+from typing_extensions import NotRequired
+
+from . import shared
+
+
+class Model(TypedDict):
+    info: NotRequired[shared.SharedModel]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_typeddict/shared.py 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_typeddict/shared.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/reuse_scope_tree_typeddict/shared.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/reuse_scope_tree_typeddict/shared.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  shared.py
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import TypedDict
+
+from typing_extensions import NotRequired
+
+
+class SharedModel(TypedDict):
+    id: NotRequired[int]
+    name: NotRequired[str]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/root_id.py 0.45.0-1/tests/data/expected/main/jsonschema/root_id.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/root_id.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/root_id.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  root_id.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, List, Optional
+
+from pydantic import BaseModel, Field, conint
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class Person(BaseModel):
+    firstName: Optional[str] = Field(None, description="The person's first name.")
+    lastName: Optional[str] = Field(None, description="The person's last name.")
+    age: Optional[conint(ge=0)] = Field(
+        None, description='Age in years which must be equal to or greater than zero.'
+    )
+    friends: Optional[List[Any]] = None
+    comment: None = None
+
+
+class OriginalPerson(BaseModel):
+    __root__: Person
+
+
+class Pet(BaseModel):
+    name: Optional[str] = Field(None, examples=['dog', 'cat'])
+    owner: Optional[Person] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/root_id_absolute_url.py 0.45.0-1/tests/data/expected/main/jsonschema/root_id_absolute_url.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/root_id_absolute_url.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/root_id_absolute_url.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  root_id_absolute_url.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, List, Optional
+
+from pydantic import BaseModel, Field, conint
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class Person(BaseModel):
+    firstName: Optional[str] = Field(None, description="The person's first name.")
+    lastName: Optional[str] = Field(None, description="The person's last name.")
+    age: Optional[conint(ge=0)] = Field(
+        None, description='Age in years which must be equal to or greater than zero.'
+    )
+    friends: Optional[List[Any]] = None
+    comment: None = None
+
+
+class OriginalPerson(BaseModel):
+    __root__: Person
+
+
+class Pet(BaseModel):
+    name: Optional[str] = Field(None, examples=['dog', 'cat'])
+    owner: Optional[Person] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/root_in_enum.py 0.45.0-1/tests/data/expected/main/jsonschema/root_in_enum.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/root_in_enum.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/root_in_enum.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+# generated by datamodel-codegen:
+#   filename:  enum_in_root.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, Optional
+
+from pydantic import BaseModel, Field
+
+
+class Model(BaseModel):
+    class Config:
+        allow_population_by_field_name = True
+
+    order_reference: Optional[str] = Field(
+        None,
+        alias='orderReference',
+        description='Reference number of the order',
+        examples=['27378669'],
+    )
+    brand: Optional[Literal['OPUS', 'someday']] = Field(
+        None, description='purchased brand'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/root_model_default_value.py 0.45.0-1/tests/data/expected/main/jsonschema/root_model_default_value.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/root_model_default_value.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/root_model_default_value.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,37 @@
+# generated by datamodel-codegen:
+#   filename:  root_model_default_value.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Annotated, Optional
+
+from pydantic import BaseModel, Field, RootModel
+
+
+class AdminStateLeaf(Enum):
+    enable = 'enable'
+    disable = 'disable'
+
+
+class CountType(RootModel[int]):
+    root: Annotated[int, Field(ge=0, le=100)]
+
+
+class NameType(RootModel[str]):
+    root: Annotated[str, Field(max_length=50, min_length=1)]
+
+
+class Model(BaseModel):
+    admin_state: Optional[AdminStateLeaf] = AdminStateLeaf.enable
+    count: Annotated[
+        Optional[CountType],
+        Field(default_factory=lambda: CountType.model_validate(CountType(10))),
+    ]
+    name: Annotated[
+        Optional[NameType],
+        Field(
+            default_factory=lambda: NameType.model_validate(NameType('default_name'))
+        ),
+    ]
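
Note: the default_factory lambdas above wrap the default in model_validate so that every Model() call constructs a fresh, validated root-model instance instead of sharing one mutable default. A minimal sketch under the assumption that pydantic v2 is installed (the printed values are illustrative):

    from typing import Annotated, Optional
    from pydantic import BaseModel, Field, RootModel

    class CountType(RootModel[int]):
        root: Annotated[int, Field(ge=0, le=100)]

    class Model(BaseModel):
        count: Annotated[
            Optional[CountType],
            Field(default_factory=lambda: CountType.model_validate(CountType(10))),
        ]

    a, b = Model(), Model()
    print(a.count.root)        # 10
    print(a.count is b.count)  # False -- the factory runs once per instance
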
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/root_model_default_value_branches.py 0.45.0-1/tests/data/expected/main/jsonschema/root_model_default_value_branches.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/root_model_default_value_branches.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/root_model_default_value_branches.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  root_model_default_value_branches.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, List, Optional
+
+from pydantic import BaseModel, Field, RootModel
+
+
+class CountType(RootModel[int]):
+    root: Annotated[int, Field(ge=0, le=100)]
+
+
+class Model(BaseModel):
+    count_with_default: Annotated[
+        Optional[CountType],
+        Field(default_factory=lambda: CountType.model_validate(CountType(10))),
+    ]
+    count_no_default: Optional[CountType] = None
+    count_list_default: Annotated[
+        Optional[List[CountType]],
+        Field(default_factory=lambda: [CountType.model_validate(v) for v in [1, 2, 3]]),
+    ]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/root_model_default_value_no_annotated.py 0.45.0-1/tests/data/expected/main/jsonschema/root_model_default_value_no_annotated.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/root_model_default_value_no_annotated.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/root_model_default_value_no_annotated.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  root_model_default_value.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel, Field, RootModel, conint, constr
+
+
+class AdminStateLeaf(Enum):
+    enable = 'enable'
+    disable = 'disable'
+
+
+class CountType(RootModel[conint(ge=0, le=100)]):
+    root: conint(ge=0, le=100)
+
+
+class NameType(RootModel[constr(min_length=1, max_length=50)]):
+    root: constr(min_length=1, max_length=50)
+
+
+class Model(BaseModel):
+    admin_state: Optional[AdminStateLeaf] = AdminStateLeaf.enable
+    count: Optional[CountType] = Field(
+        default_factory=lambda: CountType.model_validate(10)
+    )
+    name: Optional[NameType] = Field(
+        default_factory=lambda: NameType.model_validate('default_name')
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/root_model_default_value_non_root.py 0.45.0-1/tests/data/expected/main/jsonschema/root_model_default_value_non_root.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/root_model_default_value_non_root.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/root_model_default_value_non_root.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,29 @@
+# generated by datamodel-codegen:
+#   filename:  root_model_default_value_non_root.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, Optional
+
+from pydantic import BaseModel, Field, RootModel
+
+
+class CountType(RootModel[int]):
+    root: Annotated[int, Field(ge=0, le=100)]
+
+
+class PersonType(BaseModel):
+    name: Optional[str] = None
+
+
+class Model(BaseModel):
+    root_model_field: Annotated[
+        Optional[CountType],
+        Field(default_factory=lambda: CountType.model_validate(CountType(10))),
+    ]
+    non_root_model_field: Annotated[
+        Optional[PersonType],
+        Field(default_factory=lambda: PersonType.model_validate({'name': 'John'})),
+    ]
+    primitive_field: Optional[str] = 'hello'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/root_model_with_additional_properties.py 0.45.0-1/tests/data/expected/main/jsonschema/root_model_with_additional_properties.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/root_model_with_additional_properties.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/root_model_with_additional_properties.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,49 @@
+# generated by datamodel-codegen:
+#   filename:  root_model_with_additional_properties.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Dict, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class NestedObjectResult(BaseModel):
+    status: int
+
+
+class NestedEnumResult(Enum):
+    red = 'red'
+    green = 'green'
+
+
+class OneOfResult(BaseModel):
+    description: Optional[str] = None
+
+
+class AnyOfResult(BaseModel):
+    description: Optional[str] = None
+
+
+class User(BaseModel):
+    name: Optional[str] = None
+
+
+class AllOfResult(User):
+    description: Optional[str] = None
+
+
+class Model(BaseModel):
+    test_id: str = Field(..., description='test ID')
+    test_ip: str = Field(..., description='test IP')
+    result: Dict[str, int]
+    nested_object_result: Dict[str, NestedObjectResult]
+    nested_enum_result: Dict[str, NestedEnumResult]
+    all_of_result: Optional[Dict[str, AllOfResult]] = None
+    one_of_result: Optional[Dict[str, Union[User, OneOfResult]]] = None
+    any_of_result: Optional[Dict[str, Union[User, AnyOfResult]]] = None
+    all_of_with_unknown_object: Optional[Dict[str, User]] = None
+    objectRef: Optional[Dict[str, User]] = None
+    deepNestedObjectRef: Optional[Dict[str, Dict[str, Dict[str, User]]]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/root_model_with_additional_properties_custom_class_name.py 0.45.0-1/tests/data/expected/main/jsonschema/root_model_with_additional_properties_custom_class_name.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/root_model_with_additional_properties_custom_class_name.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/root_model_with_additional_properties_custom_class_name.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,49 @@
+# generated by datamodel-codegen:
+#   filename:  root_model_with_additional_properties.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Dict, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class CustomNestedObjectResult(BaseModel):
+    status: int
+
+
+class CustomNestedEnumResult(Enum):
+    red = 'red'
+    green = 'green'
+
+
+class CustomOneOfResult(BaseModel):
+    description: Optional[str] = None
+
+
+class CustomAnyOfResult(BaseModel):
+    description: Optional[str] = None
+
+
+class CustomUser(BaseModel):
+    name: Optional[str] = None
+
+
+class CustomAllOfResult(CustomUser):
+    description: Optional[str] = None
+
+
+class CustomModel(BaseModel):
+    test_id: str = Field(..., description='test ID')
+    test_ip: str = Field(..., description='test IP')
+    result: Dict[str, int]
+    nested_object_result: Dict[str, CustomNestedObjectResult]
+    nested_enum_result: Dict[str, CustomNestedEnumResult]
+    all_of_result: Optional[Dict[str, CustomAllOfResult]] = None
+    one_of_result: Optional[Dict[str, Union[CustomUser, CustomOneOfResult]]] = None
+    any_of_result: Optional[Dict[str, Union[CustomUser, CustomAnyOfResult]]] = None
+    all_of_with_unknown_object: Optional[Dict[str, CustomUser]] = None
+    objectRef: Optional[Dict[str, CustomUser]] = None
+    deepNestedObjectRef: Optional[Dict[str, Dict[str, Dict[str, CustomUser]]]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/root_model_with_additional_properties_literal.py 0.45.0-1/tests/data/expected/main/jsonschema/root_model_with_additional_properties_literal.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/root_model_with_additional_properties_literal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/root_model_with_additional_properties_literal.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,43 @@
+# generated by datamodel-codegen:
+#   filename:  root_model_with_additional_properties.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Dict, Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class NestedObjectResult(BaseModel):
+    status: int
+
+
+class OneOfResult(BaseModel):
+    description: Optional[str] = None
+
+
+class AnyOfResult(BaseModel):
+    description: Optional[str] = None
+
+
+class User(BaseModel):
+    name: Optional[str] = None
+
+
+class AllOfResult(User):
+    description: Optional[str] = None
+
+
+class Model(BaseModel):
+    test_id: str = Field(..., description='test ID')
+    test_ip: str = Field(..., description='test IP')
+    result: Dict[str, int]
+    nested_object_result: Dict[str, NestedObjectResult]
+    nested_enum_result: Dict[str, Literal['red', 'green']]
+    all_of_result: Optional[Dict[str, AllOfResult]] = None
+    one_of_result: Optional[Dict[str, Union[User, OneOfResult]]] = None
+    any_of_result: Optional[Dict[str, Union[User, AnyOfResult]]] = None
+    all_of_with_unknown_object: Optional[Dict[str, User]] = None
+    objectRef: Optional[Dict[str, User]] = None
+    deepNestedObjectRef: Optional[Dict[str, Dict[str, Dict[str, User]]]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/root_model_with_additional_properties_use_generic_container_types.py 0.45.0-1/tests/data/expected/main/jsonschema/root_model_with_additional_properties_use_generic_container_types.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/root_model_with_additional_properties_use_generic_container_types.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/root_model_with_additional_properties_use_generic_container_types.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,49 @@
+# generated by datamodel-codegen:
+#   filename:  root_model_with_additional_properties.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Mapping, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class NestedObjectResult(BaseModel):
+    status: int
+
+
+class NestedEnumResult(Enum):
+    red = 'red'
+    green = 'green'
+
+
+class OneOfResult(BaseModel):
+    description: Optional[str] = None
+
+
+class AnyOfResult(BaseModel):
+    description: Optional[str] = None
+
+
+class User(BaseModel):
+    name: Optional[str] = None
+
+
+class AllOfResult(User):
+    description: Optional[str] = None
+
+
+class Model(BaseModel):
+    test_id: str = Field(..., description='test ID')
+    test_ip: str = Field(..., description='test IP')
+    result: Mapping[str, int]
+    nested_object_result: Mapping[str, NestedObjectResult]
+    nested_enum_result: Mapping[str, NestedEnumResult]
+    all_of_result: Optional[Mapping[str, AllOfResult]] = None
+    one_of_result: Optional[Mapping[str, Union[User, OneOfResult]]] = None
+    any_of_result: Optional[Mapping[str, Union[User, AnyOfResult]]] = None
+    all_of_with_unknown_object: Optional[Mapping[str, User]] = None
+    objectRef: Optional[Mapping[str, User]] = None
+    deepNestedObjectRef: Optional[Mapping[str, Mapping[str, Mapping[str, User]]]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/root_model_with_additional_properties_use_standard_collections.py 0.45.0-1/tests/data/expected/main/jsonschema/root_model_with_additional_properties_use_standard_collections.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/root_model_with_additional_properties_use_standard_collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/root_model_with_additional_properties_use_standard_collections.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,49 @@
+# generated by datamodel-codegen:
+#   filename:  root_model_with_additional_properties.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class NestedObjectResult(BaseModel):
+    status: int
+
+
+class NestedEnumResult(Enum):
+    red = 'red'
+    green = 'green'
+
+
+class OneOfResult(BaseModel):
+    description: Optional[str] = None
+
+
+class AnyOfResult(BaseModel):
+    description: Optional[str] = None
+
+
+class User(BaseModel):
+    name: Optional[str] = None
+
+
+class AllOfResult(User):
+    description: Optional[str] = None
+
+
+class Model(BaseModel):
+    test_id: str = Field(..., description='test ID')
+    test_ip: str = Field(..., description='test IP')
+    result: dict[str, int]
+    nested_object_result: dict[str, NestedObjectResult]
+    nested_enum_result: dict[str, NestedEnumResult]
+    all_of_result: Optional[dict[str, AllOfResult]] = None
+    one_of_result: Optional[dict[str, Union[User, OneOfResult]]] = None
+    any_of_result: Optional[dict[str, Union[User, AnyOfResult]]] = None
+    all_of_with_unknown_object: Optional[dict[str, User]] = None
+    objectRef: Optional[dict[str, User]] = None
+    deepNestedObjectRef: Optional[dict[str, dict[str, dict[str, User]]]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/root_one_of/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/root_one_of/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/root_one_of/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/root_one_of/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  root_one_of
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/root_one_of/bar.py 0.45.0-1/tests/data/expected/main/jsonschema/root_one_of/bar.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/root_one_of/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/root_one_of/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  bar.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class JobRun(BaseModel):
+    enabled: Optional[bool] = Field(False, description='If Live Execution is Enabled.')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/root_one_of/foo.py 0.45.0-1/tests/data/expected/main/jsonschema/root_one_of/foo.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/root_one_of/foo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/root_one_of/foo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  foo.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class JobRun(BaseModel):
+    enabled: Optional[bool] = Field(False, description='If Live Execution is enabled')
+    resources: Optional[List[str]] = Field(
+        None, description='Resource full classname to register to extend any endpoints.'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/root_one_of/union.py 0.45.0-1/tests/data/expected/main/jsonschema/root_one_of/union.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/root_one_of/union.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/root_one_of/union.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,26 @@
+# generated by datamodel-codegen:
+#   filename:  union.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Union
+
+from pydantic import BaseModel, Extra, Field
+
+from . import bar, foo
+
+
+class ExecutionContext(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    __root__: Union[foo.JobRun, bar.JobRun] = Field(
+        ..., description='Execution Configuration.'
+    )
+
+
+class App(BaseModel):
+    runtime: Optional[ExecutionContext] = Field(
+        None, description='Execution Configuration.'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/same_name_objects.py 0.45.0-1/tests/data/expected/main/jsonschema/same_name_objects.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/same_name_objects.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/same_name_objects.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  same_name_objects.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, List
+
+from pydantic import BaseModel, Extra
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class Friends(BaseModel):
+    pass
+
+    class Config:
+        extra = Extra.forbid
+
+
+class FriendsModel(BaseModel):
+    __root__: List[Any]
+
+
+class Tst2(BaseModel):
+    __root__: FriendsModel
+
+
+class Tst1(BaseModel):
+    __root__: FriendsModel
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/self_reference.py 0.45.0-1/tests/data/expected/main/jsonschema/self_reference.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/self_reference.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/self_reference.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  self_reference.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class Pet(BaseModel):
+    name: Optional[str] = Field(None, examples=['dog', 'cat'])
+    friends: Optional[List[Pet]] = None
+
+
+Pet.update_forward_refs()
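
Note: this fixture relies on pydantic v1's postponed-annotation handling: with `from __future__ import annotations`, the `Pet` reference inside `friends` stays a string until update_forward_refs() resolves it against the module namespace. A minimal usage sketch, assuming pydantic v1:

    from __future__ import annotations
    from typing import List, Optional
    from pydantic import BaseModel

    class Pet(BaseModel):
        name: Optional[str] = None
        friends: Optional[List[Pet]] = None

    Pet.update_forward_refs()  # resolve the postponed "Pet" annotation

    rex = Pet(name='rex', friends=[Pet(name='buddy')])
    print(rex.friends[0].name)  # buddy
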
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/similar_nested_array.py 0.45.0-1/tests/data/expected/main/jsonschema/similar_nested_array.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/similar_nested_array.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/similar_nested_array.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,66 @@
+# generated by datamodel-codegen:
+#   filename:  similar_nested_array.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, List, Optional, Union
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class Datum(BaseModel):
+    keyA: Optional[str] = None
+
+
+class ObjectA(BaseModel):
+    data: Optional[List[Datum]] = None
+
+
+class Datum1(BaseModel):
+    keyB: Optional[str] = None
+
+
+class ObjectB(BaseModel):
+    data: Optional[List[Datum1]] = None
+
+
+class KeyC(BaseModel):
+    nestedA: Optional[str] = None
+
+
+class KeyC1(BaseModel):
+    nestedB: Optional[str] = None
+
+
+class ObjectC(BaseModel):
+    keyC: Optional[Union[KeyC, KeyC1]] = None
+
+
+class KeyCItem(BaseModel):
+    nestedA: Optional[str] = None
+
+
+class KeyCItem1(BaseModel):
+    nestedB: Optional[str] = None
+
+
+class KeyCEnum(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class KeyCEnum1(Enum):
+    orange = 'orange'
+    apple = 'apple'
+    milk = 'milk'
+
+
+class ObjectD(BaseModel):
+    keyC: Optional[List[Union[KeyCItem, KeyCItem1, KeyCEnum, KeyCEnum1]]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/space_field_enum_snake_case_field.py 0.45.0-1/tests/data/expected/main/jsonschema/space_field_enum_snake_case_field.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/space_field_enum_snake_case_field.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/space_field_enum_snake_case_field.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  space_field_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class SpaceIF(Enum):
+    space_field = 'Space Field'
+
+
+class Model(BaseModel):
+    space_if: Optional[SpaceIF] = Field(None, alias='SpaceIF')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/special_enum.py 0.45.0-1/tests/data/expected/main/jsonschema/special_enum.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/special_enum.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/special_enum.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  special_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class ModelEnum(Enum):
+    True_ = True
+    False_ = False
+    field_ = ''
+    field__1 = '\n'
+    field__ = '\r\n'
+    field__2 = '\t'
+    field__3 = '\b'
+    field__4 = '\\'
+
+
+class Model(BaseModel):
+    __root__: Optional[ModelEnum] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/special_enum_empty_enum_field_name.py 0.45.0-1/tests/data/expected/main/jsonschema/special_enum_empty_enum_field_name.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/special_enum_empty_enum_field_name.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/special_enum_empty_enum_field_name.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  special_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class ModelEnum(Enum):
+    True_ = True
+    False_ = False
+    empty = ''
+    field_ = '\n'
+    field__ = '\r\n'
+    field__1 = '\t'
+    field__2 = '\b'
+    field__3 = '\\'
+
+
+class Model(BaseModel):
+    __root__: Optional[ModelEnum] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/special_enum_special_field_name_prefix.py 0.45.0-1/tests/data/expected/main/jsonschema/special_enum_special_field_name_prefix.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/special_enum_special_field_name_prefix.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/special_enum_special_field_name_prefix.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  special_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class ModelEnum(Enum):
+    True_ = True
+    False_ = False
+    special_ = ''
+    special__1 = '\n'
+    special__ = '\r\n'
+    special__2 = '\t'
+    special__3 = '\b'
+    special__4 = '\\'
+
+
+class Model(BaseModel):
+    __root__: Optional[ModelEnum] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/special_enum_special_field_name_prefix_keep_private.py 0.45.0-1/tests/data/expected/main/jsonschema/special_enum_special_field_name_prefix_keep_private.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/special_enum_special_field_name_prefix_keep_private.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/special_enum_special_field_name_prefix_keep_private.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  special_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class ModelEnum(Enum):
+    True_ = True
+    False_ = False
+    _ = ''
+    __1 = '\n'
+    __ = '\r\n'
+    __2 = '\t'
+    __3 = '\b'
+    __4 = '\\'
+
+
+class Model(BaseModel):
+    __root__: Optional[ModelEnum] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/special_field_name.py 0.45.0-1/tests/data/expected/main/jsonschema/special_field_name.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/special_field_name.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/special_field_name.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  special_field_name.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class SpecialField(BaseModel):
+    global_: Optional[str] = Field(None, alias='global')
+    with_: Optional[str] = Field(None, alias='with')
+    class_: Optional[int] = Field(None, alias='class')
+    class_s: Optional[int] = Field(None, alias="class's")
+    class_s_1: Optional[str] = Field(None, alias='class-s')
+    field_: Optional[str] = Field(None, alias='#')
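
Note: every reserved or non-identifier property above is renamed with a trailing underscore (or the `field_` fallback when no usable characters remain) and wired back to the original JSON key through an alias. A trimmed sketch, assuming pydantic v1, where parse_obj populates fields by alias by default:

    from typing import Optional
    from pydantic import BaseModel, Field

    class SpecialField(BaseModel):
        global_: Optional[str] = Field(None, alias='global')
        with_: Optional[str] = Field(None, alias='with')
        field_: Optional[str] = Field(None, alias='#')

    data = {'global': 'g', 'with': 'w', '#': 'y'}  # input keys use the aliases
    m = SpecialField.parse_obj(data)
    print(m.global_, m.field_)  # g y
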
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/special_model_remove_special_field_name_prefix.py 0.45.0-1/tests/data/expected/main/jsonschema/special_model_remove_special_field_name_prefix.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/special_model_remove_special_field_name_prefix.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/special_model_remove_special_field_name_prefix.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  special_prefix_model.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Model(BaseModel):
+    id: AnyUrl = Field(..., alias='@id', title='Id must be present and must be a URI')

+    type: str = Field(..., alias='@type')
+    type_1: Optional[str] = Field(None, alias='@+!type')
+    type_2: Optional[str] = Field(None, alias='@-!type')
+    profile: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/stdin_oneof_ref.py 0.45.0-1/tests/data/expected/main/jsonschema/stdin_oneof_ref.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/stdin_oneof_ref.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/stdin_oneof_ref.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  <stdin>
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Test(BaseModel):
+    name: Optional[str] = None
+
+
+class Model(BaseModel):
+    __root__: Test
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/strict_types.py 0.45.0-1/tests/data/expected/main/jsonschema/strict_types.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/strict_types.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/strict_types.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,23 @@
+# generated by datamodel-codegen:
+#   filename:  strict_types.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field, conbytes, confloat, conint, constr
+
+
+class User(BaseModel):
+    name: Optional[str] = Field(None, example='ken')
+    age: Optional[int] = None
+    salary: Optional[conint(ge=0)] = None
+    debt: Optional[conint(le=0)] = None
+    loan: Optional[confloat(le=0.0)] = None
+    tel: Optional[constr(regex=r'^(\([0-9]{3}\))?[0-9]{3}-[0-9]{4}$')] = None
+    height: Optional[confloat(ge=0.0)] = None
+    weight: Optional[confloat(ge=0.0)] = None
+    score: Optional[confloat(ge=1e-08)] = None
+    active: Optional[bool] = None
+    photo: Optional[conbytes(min_length=100)] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/strict_types_all.py 0.45.0-1/tests/data/expected/main/jsonschema/strict_types_all.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/strict_types_all.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/strict_types_all.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,35 @@
+# generated by datamodel-codegen:
+#   filename:  strict_types.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import (
+    BaseModel,
+    Field,
+    StrictBool,
+    StrictBytes,
+    StrictInt,
+    StrictStr,
+    confloat,
+    conint,
+    constr,
+)
+
+
+class User(BaseModel):
+    name: Optional[StrictStr] = Field(None, example='ken')
+    age: Optional[StrictInt] = None
+    salary: Optional[conint(ge=0, strict=True)] = None
+    debt: Optional[conint(le=0, strict=True)] = None
+    loan: Optional[confloat(le=0.0, strict=True)] = None
+    tel: Optional[
+        constr(regex=r'^(\([0-9]{3}\))?[0-9]{3}-[0-9]{4}$', strict=True)
+    ] = None
+    height: Optional[confloat(ge=0.0, strict=True)] = None
+    weight: Optional[confloat(ge=0.0, strict=True)] = None
+    score: Optional[confloat(ge=1e-08, strict=True)] = None
+    active: Optional[StrictBool] = None
+    photo: Optional[StrictBytes] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/strict_types_all_field_constraints.py 0.45.0-1/tests/data/expected/main/jsonschema/strict_types_all_field_constraints.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/strict_types_all_field_constraints.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/strict_types_all_field_constraints.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,31 @@
+# generated by datamodel-codegen:
+#   filename:  strict_types.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import (
+    BaseModel,
+    Field,
+    StrictBool,
+    StrictBytes,
+    StrictFloat,
+    StrictInt,
+    StrictStr,
+)
+
+
+class User(BaseModel):
+    name: Optional[StrictStr] = Field(None, example='ken')
+    age: Optional[StrictInt] = None
+    salary: Optional[StrictInt] = Field(None, ge=0)
+    debt: Optional[StrictInt] = Field(None, le=0)
+    loan: Optional[StrictFloat] = Field(None, le=0.0)
+    tel: Optional[StrictStr] = Field(None, regex='^(\\([0-9]{3}\\))?[0-9]{3}-[0-9]{4}$')
+    height: Optional[StrictFloat] = Field(None, ge=0.0)
+    weight: Optional[StrictFloat] = Field(None, ge=0.0)
+    score: Optional[StrictFloat] = Field(None, ge=1e-08)
+    active: Optional[StrictBool] = None
+    photo: Optional[StrictBytes] = Field(None, min_length=100)
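
Note: with the Strict* variants the numeric and length constraints move into Field(...) while the type itself disables coercion, so the string '30' is rejected rather than converted to an int. A minimal sketch; the error-type label differs between pydantic v1 and v2, but the rejection is the same:

    from typing import Optional
    from pydantic import BaseModel, StrictInt, ValidationError

    class User(BaseModel):
        age: Optional[StrictInt] = None

    print(User(age=30).age)  # 30
    try:
        User(age='30')       # no str -> int coercion under strict types
    except ValidationError as exc:
        print('rejected:', exc.errors()[0]['type'])
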
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/string_dict.py 0.45.0-1/tests/data/expected/main/jsonschema/string_dict.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/string_dict.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/string_dict.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  string_dict.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Dict
+
+from pydantic import BaseModel
+
+
+class MyStringDict(BaseModel):
+    __root__: Dict[str, str]
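
Note: an object schema whose only content is string-valued additionalProperties becomes a custom root type over Dict[str, str]. A minimal sketch, assuming pydantic v1 (the __root__ API shown above):

    from typing import Dict
    from pydantic import BaseModel

    class MyStringDict(BaseModel):
        __root__: Dict[str, str]

    d = MyStringDict.parse_obj({'en': 'hello', 'fr': 'bonjour'})
    print(d.__root__['fr'])  # bonjour
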
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/subclass_enum.py 0.45.0-1/tests/data/expected/main/jsonschema/subclass_enum.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/subclass_enum.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/subclass_enum.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,53 @@
+# generated by datamodel-codegen:
+#   filename:  subclass_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum, IntEnum
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class IntEnumModel(IntEnum):
+    integer_1 = 1
+    integer_2 = 2
+    integer_3 = 3
+
+
+class FloatEnum(float, Enum):
+    number_1_1 = 1.1
+    number_2_1 = 2.1
+    number_3_1 = 3.1
+
+
+class StrEnum(str, Enum):
+    field_1 = '1'
+    field_2 = '2'
+    field_3 = '3'
+
+
+class NonTypedEnum(Enum):
+    field_1 = '1'
+    field_2 = '2'
+    field_3 = '3'
+
+
+class BooleanEnum(Enum):
+    boolean_True = True
+    boolean_False = False
+
+
+class UnknownEnum(Enum):
+    a = 'a'
+    b = 'b'
+
+
+class Model(BaseModel):
+    IntEnum: Optional[IntEnumModel] = None
+    FloatEnum: Optional[FloatEnum] = None
+    StrEnum: Optional[StrEnum] = None
+    NonTypedEnum: Optional[NonTypedEnum] = None
+    BooleanEnum: Optional[BooleanEnum] = None
+    UnknownEnum: Optional[UnknownEnum] = None
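
Note: the generator picks the enum base from the member values here: homogeneous ints become IntEnum, and strings get the str mixin, as the fixture name suggests for the subclass-enum option. The mixin decides whether members compare equal to raw values, as this standard-library-only sketch shows:

    from enum import Enum, IntEnum

    class IntEnumModel(IntEnum):
        integer_1 = 1

    class StrEnum(str, Enum):
        field_1 = '1'

    class NonTypedEnum(Enum):
        field_1 = '1'

    assert IntEnumModel.integer_1 == 1  # IntEnum members compare equal to ints
    assert StrEnum.field_1 == '1'       # str-mixin members compare equal to strs
    assert NonTypedEnum.field_1 != '1'  # plain Enum members do not
    print('mixin comparisons behave as expected')
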
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/title_with_dots.py 0.45.0-1/tests/data/expected/main/jsonschema/title_with_dots.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/title_with_dots.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/title_with_dots.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  title_with_dots.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class Nested15(BaseModel):
+    value: Optional[int] = None
+
+
+class GenomeStudio20MethylationModule(BaseModel):
+    version: Optional[str] = None
+    nested: Optional[Nested15] = Field(None, title='Nested 1.5')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/titles.py 0.45.0-1/tests/data/expected/main/jsonschema/titles.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/titles.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/titles.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,57 @@
+# generated by datamodel-codegen:
+#   filename:  titles.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class ProcessingStatus(Enum):
+    COMPLETED = 'COMPLETED'
+    PENDING = 'PENDING'
+    FAILED = 'FAILED'
+
+
+class Kind(BaseModel):
+    __root__: str
+
+
+class ExtendedProcessingTask1(BaseModel):
+    comment: Optional[str] = None
+
+
+class ProcessingStatusUnion(BaseModel):
+    id: Optional[int] = None
+    description: Optional[str] = None
+
+
+class ProcessingTasksTitle(BaseModel):
+    __root__: List[ProcessingTask] = Field(..., title='Processing Tasks Title')
+
+
+class ExtendedProcessingTask(BaseModel):
+    __root__: Union[ProcessingTasksTitle, ExtendedProcessingTask1] = Field(
+        ..., title='Extended Processing Task Title'
+    )
+
+
+class ExtendedProcessingTasks(BaseModel):
+    __root__: List[ExtendedProcessingTask] = Field(
+        ..., title='Extended Processing Tasks Title'
+    )
+
+
+class ProcessingTask(BaseModel):
+    processing_status_union: Optional[
+        Union[ProcessingStatusUnion, ExtendedProcessingTask, ProcessingStatus]
+    ] = Field('COMPLETED', title='Processing Status Union Title')
+    processing_status: Optional[ProcessingStatus] = 'COMPLETED'
+    name: Optional[str] = None
+    kind: Optional[Kind] = None
+
+
+ProcessingTasksTitle.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/titles_use_title_as_name.py 0.45.0-1/tests/data/expected/main/jsonschema/titles_use_title_as_name.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/titles_use_title_as_name.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/titles_use_title_as_name.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,57 @@
+# generated by datamodel-codegen:
+#   filename:  titles.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class ProcessingStatusTitle(Enum):
+    COMPLETED = 'COMPLETED'
+    PENDING = 'PENDING'
+    FAILED = 'FAILED'
+
+
+class Kind(BaseModel):
+    __root__: str
+
+
+class NestedCommentTitle(BaseModel):
+    comment: Optional[str] = None
+
+
+class ProcessingStatusDetail(BaseModel):
+    id: Optional[int] = None
+    description: Optional[str] = None
+
+
+class ProcessingTasksTitle(BaseModel):
+    __root__: List[ProcessingTaskTitle] = Field(..., title='Processing Tasks Title')
+
+
+class ExtendedProcessingTask(BaseModel):
+    __root__: Union[ProcessingTasksTitle, NestedCommentTitle] = Field(
+        ..., title='Extended Processing Task Title'
+    )
+
+
+class ExtendedProcessingTasksTitle(BaseModel):
+    __root__: List[ExtendedProcessingTask] = Field(
+        ..., title='Extended Processing Tasks Title'
+    )
+
+
+class ProcessingTaskTitle(BaseModel):
+    processing_status_union: Optional[
+        Union[ProcessingStatusDetail, ExtendedProcessingTask, ProcessingStatusTitle]
+    ] = Field('COMPLETED', title='Processing Status Union Title')
+    processing_status: Optional[ProcessingStatusTitle] = 'COMPLETED'
+    name: Optional[str] = None
+    kind: Optional[Kind] = None
+
+
+ProcessingTasksTitle.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  treat_dot_as_module
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  treat_dot_as_module
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  treat_dot_as_module
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  treat_dot_as_module
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/basic_enum.py 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/basic_enum.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/basic_enum.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/basic_enum.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  complex.directory/api.path.basic_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class ProcessingStatus(Enum):
+    COMPLETED = 'COMPLETED'
+    PENDING = 'PENDING'
+    FAILED = 'FAILED'
+
+
+class BasicEnum(BaseModel):
+    processing_status: Optional[ProcessingStatus] = 'COMPLETED'
+    name: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/input.py 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/input.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/input.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/input.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  complex.directory/api.path.input.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Optional
+
+from pydantic import BaseModel, Field
+
+from ... import schema
+
+
+class Input(BaseModel):
+    input: Optional[Any] = Field('input', title='Input')
+    extType: Optional[schema.ExtType] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/output.py 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/output.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/api/path/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  complex.directory/api.path.output.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Optional
+
+from pydantic import BaseModel, Field
+
+
+class Output(BaseModel):
+    output: Optional[Any] = Field('output', title='Output')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/schema.py 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/schema.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/schema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module/complex/directory/schema.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  complex.directory/schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Optional
+
+from pydantic import BaseModel, Field
+
+
+class ExtType(BaseModel):
+    ExtType: Optional[Any] = Field(None, title='ExtType')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module_single/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module_single/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module_single/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module_single/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  treat_dot_as_module_single
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module_single/model/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module_single/model/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module_single/model/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module_single/model/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  treat_dot_as_module_single
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module_single/model/schema.py 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module_single/model/schema.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_as_module_single/model/schema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_as_module_single/model/schema.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  model.schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class User(BaseModel):
+    name: str
+    age: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  treat_dot_as_module
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/api_path_basic_enum.py 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/api_path_basic_enum.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/api_path_basic_enum.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/api_path_basic_enum.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  complex.directory/api.path.basic_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class ProcessingStatus(Enum):
+    COMPLETED = 'COMPLETED'
+    PENDING = 'PENDING'
+    FAILED = 'FAILED'
+
+
+class BasicEnum(BaseModel):
+    processing_status: Optional[ProcessingStatus] = 'COMPLETED'
+    name: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/api_path_input.py 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/api_path_input.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/api_path_input.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/api_path_input.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  complex.directory/api.path.input.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Optional
+
+from pydantic import BaseModel, Field
+
+from . import schema
+
+
+class Input(BaseModel):
+    input: Optional[Any] = Field('input', title='Input')
+    extType: Optional[schema.ExtType] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/api_path_output.py 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/api_path_output.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/api_path_output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/api_path_output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  complex.directory/api.path.output.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Optional
+
+from pydantic import BaseModel, Field
+
+
+class Output(BaseModel):
+    output: Optional[Any] = Field('output', title='Output')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/schema.py 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/schema.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/schema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/treat_dot_not_as_module/complex.directory/schema.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  complex.directory/schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Optional
+
+from pydantic import BaseModel, Field
+
+
+class ExtType(BaseModel):
+    ExtType: Optional[Any] = Field(None, title='ExtType')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/tutorial_pet.py 0.45.0-1/tests/data/expected/main/jsonschema/tutorial_pet.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/tutorial_pet.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/tutorial_pet.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,23 @@
+# generated by datamodel-codegen:
+#   filename:  tutorial_pet.json
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel, Field, conint
+
+
+class Species(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    bird = 'bird'
+    fish = 'fish'
+
+
+class Pet(BaseModel):
+    name: str = Field(..., description="The pet's name")
+    species: Species
+    age: Optional[conint(ge=0)] = Field(None, description='Age in years')
+    vaccinated: Optional[bool] = False
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/tutorial_pet_v2.py 0.45.0-1/tests/data/expected/main/jsonschema/tutorial_pet_v2.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/tutorial_pet_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/tutorial_pet_v2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,23 @@
+# generated by datamodel-codegen:
+#   filename:  tutorial_pet.json
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class Species(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    bird = 'bird'
+    fish = 'fish'
+
+
+class Pet(BaseModel):
+    name: str = Field(..., description="The pet's name")
+    species: Species
+    age: Optional[int] = Field(None, description='Age in years', ge=0)
+    vaccinated: Optional[bool] = False
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/type_alias.py 0.45.0-1/tests/data/expected/main/jsonschema/type_alias.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/type_alias.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/type_alias.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,34 @@
+# generated by datamodel-codegen:
+#   filename:  type_alias.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, Any, List, Optional, Union
+
+from pydantic import BaseModel, Field
+from typing_extensions import TypeAlias
+
+Model: TypeAlias = Any
+
+
+SimpleString: TypeAlias = str
+
+
+UnionType: TypeAlias = Union[str, int]
+
+
+ArrayType: TypeAlias = List[str]
+
+
+AnnotatedType: TypeAlias = Annotated[
+    Union[str, bool],
+    Field(..., description='An annotated union type', title='MyAnnotatedType'),
+]
+
+
+class ModelWithTypeAliasField(BaseModel):
+    simple_field: Optional[SimpleString] = None
+    union_field: Optional[UnionType] = None
+    array_field: Optional[ArrayType] = None
+    annotated_field: Optional[AnnotatedType] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/type_alias_cycle_keep_model_order.py 0.45.0-1/tests/data/expected/main/jsonschema/type_alias_cycle_keep_model_order.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/type_alias_cycle_keep_model_order.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/type_alias_cycle_keep_model_order.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  type_alias_cycle.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import TypeAlias
+
+A: TypeAlias = list["B"]
+
+
+B: TypeAlias = list[A]
+
+
+Model: TypeAlias = A | B
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/type_alias_forward_ref_keep_model_order.py 0.45.0-1/tests/data/expected/main/jsonschema/type_alias_forward_ref_keep_model_order.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/type_alias_forward_ref_keep_model_order.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/type_alias_forward_ref_keep_model_order.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,23 @@
+# generated by datamodel-codegen:
+#   filename:  type_alias_forward_ref.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, TypeAlias, TypedDict
+
+
+class BlobPart(TypedDict):
+    type: Literal['blob']
+    data: str
+
+
+FieldPlaceholder: TypeAlias = None
+
+
+class TextPart(TypedDict):
+    type: Literal['text']
+    content: str
+
+
+SystemInstructions: TypeAlias = list[TextPart | BlobPart]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/type_alias_py312.py 0.45.0-1/tests/data/expected/main/jsonschema/type_alias_py312.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/type_alias_py312.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/type_alias_py312.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  type_alias.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, Any, List, Optional, Union
+
+from pydantic import BaseModel, Field
+
+type Model = Any
+
+
+type SimpleString = str
+
+
+type UnionType = Union[str, int]
+
+
+type ArrayType = List[str]
+
+
+type AnnotatedType = Annotated[
+    Union[str, bool],
+    Field(..., description='An annotated union type', title='MyAnnotatedType'),
+]
+
+
+class ModelWithTypeAliasField(BaseModel):
+    simple_field: Optional[SimpleString] = None
+    union_field: Optional[UnionType] = None
+    array_field: Optional[ArrayType] = None
+    annotated_field: Optional[AnnotatedType] = None
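
Note: this variant targets Python 3.12's PEP 695 `type` statement instead of the typing_extensions TypeAlias annotation used by the earlier fixture. `type` aliases are evaluated lazily, so forward references need no quoting. A minimal sketch, assuming Python 3.12+:

    type UnionType = str | int  # creates a lazily evaluated TypeAliasType

    def describe(value: UnionType) -> str:
        return f'{value!r}: {type(value).__name__}'

    print(describe('a'))        # 'a': str
    print(UnionType.__value__)  # str | int -- resolved on first access
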
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/type_alias_with_circular_ref_to_class_msgspec.py 0.45.0-1/tests/data/expected/main/jsonschema/type_alias_with_circular_ref_to_class_msgspec.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/type_alias_with_circular_ref_to_class_msgspec.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/type_alias_with_circular_ref_to_class_msgspec.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  type_alias_with_circular_ref_to_class.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Union
+
+from msgspec import Struct
+from typing_extensions import TypeAlias
+
+
+class Defaults(Struct):
+    a: List[Span]
+
+
+class SpanB(Struct):
+    recur: List[Span]
+
+
+Either: TypeAlias = Union[SpanB, "Span"]
+
+
+class Span(Struct):
+    recur: List[Either]
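(For the msgspec variant above, the string in Union[SpanB, "Span"] is resolved the first time the Struct is actually used. A minimal self-referential sketch, assuming msgspec is installed:)

import msgspec

class Span(msgspec.Struct):
    recur: list['Span']

s = msgspec.json.decode(b'{"recur": [{"recur": []}]}', type=Span)
assert isinstance(s.recur[0], Span)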
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/type_alias_with_field_description.py 0.45.0-1/tests/data/expected/main/jsonschema/type_alias_with_field_description.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/type_alias_with_field_description.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/type_alias_with_field_description.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,36 @@
+# generated by datamodel-codegen:
+#   filename:  type_alias.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, Any, List, Optional, Union
+
+from pydantic import BaseModel, Field
+from typing_extensions import TypeAlias
+
+Model: TypeAlias = Any
+
+
+SimpleString: TypeAlias = str
+
+
+UnionType: TypeAlias = Union[str, int]
+
+
+ArrayType: TypeAlias = List[str]
+
+
+AnnotatedType: TypeAlias = Annotated[
+    Union[str, bool], Field(..., title='MyAnnotatedType')
+]
+"""
+An annotated union type
+"""
+
+
+class ModelWithTypeAliasField(BaseModel):
+    simple_field: Optional[SimpleString] = None
+    union_field: Optional[UnionType] = None
+    array_field: Optional[ArrayType] = None
+    annotated_field: Optional[AnnotatedType] = None
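(In the field-description variant above, the description moves into a docstring while the title stays in Field metadata. A quick check, assuming pydantic v2, that such Annotated alias metadata reaches the JSON schema of a using model:)

from typing import Annotated, Union

from pydantic import BaseModel, Field
from typing_extensions import TypeAlias

AnnotatedType: TypeAlias = Annotated[Union[str, bool], Field(title='MyAnnotatedType')]

class M(BaseModel):
    annotated_field: AnnotatedType

print(M.model_json_schema()['properties']['annotated_field']['title'])
# -> MyAnnotatedType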
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/type_alias_with_field_description_py312.py 0.45.0-1/tests/data/expected/main/jsonschema/type_alias_with_field_description_py312.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/type_alias_with_field_description_py312.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/type_alias_with_field_description_py312.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  type_alias.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, Any, List, Optional, Union
+
+from pydantic import BaseModel, Field
+
+type Model = Any
+
+
+type SimpleString = str
+
+
+type UnionType = Union[str, int]
+
+
+type ArrayType = List[str]
+
+
+type AnnotatedType = Annotated[Union[str, bool], Field(..., title='MyAnnotatedType')]
+"""
+An annotated union type
+"""
+
+
+class ModelWithTypeAliasField(BaseModel):
+    simple_field: Optional[SimpleString] = None
+    union_field: Optional[UnionType] = None
+    array_field: Optional[ArrayType] = None
+    annotated_field: Optional[AnnotatedType] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/type_mappings.py 0.45.0-1/tests/data/expected/main/jsonschema/type_mappings.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/type_mappings.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/type_mappings.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  type_mappings.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import Base64Str, BaseModel, Field
+
+
+class BlobModel(BaseModel):
+    content: str = Field(
+        ..., description='Binary content that should be mapped to string'
+    )
+    data: Base64Str = Field(..., description='Base64 encoded data')
+    name: str = Field(..., description='Regular string field')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/type_mappings_to_boolean.py 0.45.0-1/tests/data/expected/main/jsonschema/type_mappings_to_boolean.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/type_mappings_to_boolean.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/type_mappings_to_boolean.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  type_mappings.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import Base64Str, BaseModel, Field
+
+
+class BlobModel(BaseModel):
+    content: bool = Field(
+        ..., description='Binary content that should be mapped to string'
+    )
+    data: Base64Str = Field(..., description='Base64 encoded data')
+    name: str = Field(..., description='Regular string field')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/type_mappings_to_integer.py 0.45.0-1/tests/data/expected/main/jsonschema/type_mappings_to_integer.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/type_mappings_to_integer.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/type_mappings_to_integer.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  type_mappings.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import Base64Str, BaseModel, Field
+
+
+class BlobModel(BaseModel):
+    content: int = Field(
+        ..., description='Binary content that should be mapped to string'
+    )
+    data: Base64Str = Field(..., description='Base64 encoded data')
+    name: str = Field(..., description='Regular string field')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/typed_dict_const.py 0.45.0-1/tests/data/expected/main/jsonschema/typed_dict_const.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/typed_dict_const.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/typed_dict_const.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  const.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, TypedDict
+
+
+class Const(TypedDict):
+    foo: Literal['foo']
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/typed_dict_not_required_nullable.py 0.45.0-1/tests/data/expected/main/jsonschema/typed_dict_not_required_nullable.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/typed_dict_not_required_nullable.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/typed_dict_not_required_nullable.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  not_required_nullable.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import NotRequired, Optional, TypedDict
+
+
+class Person(TypedDict):
+    name: str
+    null_name: NotRequired[Optional[str]]
+    age: NotRequired[int]
+    null_age: Optional[int]
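(The expectation above encodes a distinction worth spelling out: inside a TypedDict, Optional[X] means the key is required but may be None, whereas NotRequired[X] means the key may be missing entirely. A sketch, importing from typing_extensions for Python < 3.11:)

from typing import Optional

from typing_extensions import NotRequired, TypedDict

class Person(TypedDict):
    name: str
    null_age: Optional[int]   # must be present, value may be None
    age: NotRequired[int]     # may be absent altogether

ok: Person = {'name': 'a', 'null_age': None}            # 'age' omitted: fine
full: Person = {'name': 'b', 'null_age': 3, 'age': 40}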
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/typed_dict_special_field_name_with_inheritance_model.py 0.45.0-1/tests/data/expected/main/jsonschema/typed_dict_special_field_name_with_inheritance_model.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/typed_dict_special_field_name_with_inheritance_model.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/typed_dict_special_field_name_with_inheritance_model.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,30 @@
+# generated by datamodel-codegen:
+#   filename:  special_field_name_with_inheritance_model.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import NotRequired, TypedDict
+
+
+class NestedBase(TypedDict):
+    age: NotRequired[str]
+
+
+class Base(NestedBase):
+    name: NotRequired[str]
+
+
+SpecialField = TypedDict(
+    'SpecialField',
+    {
+        'age': NotRequired[str],
+        'name': NotRequired[str],
+        'global': NotRequired[str],
+        'with': NotRequired[str],
+        'class': NotRequired[int],
+        'class\'s': NotRequired[int],
+        'class-s': NotRequired[str],
+        '#': NotRequired[str],
+    },
+)
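(The functional TypedDict form above is forced by the keys themselves: names like 'class', 'with', or '#' cannot appear as attribute names in a class body, so the dict-based constructor is the only spelling. A minimal sketch of the same pattern:)

from typing_extensions import NotRequired, TypedDict

Special = TypedDict('Special', {'class': NotRequired[str], '#': NotRequired[str]})

row: Special = {'class': 'A', '#': '1'}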
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/typed_dict_with_only_additional_properties.py 0.45.0-1/tests/data/expected/main/jsonschema/typed_dict_with_only_additional_properties.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/typed_dict_with_only_additional_properties.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/typed_dict_with_only_additional_properties.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,9 @@
+# generated by datamodel-codegen:
+#   filename:  string_dict.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Dict, TypeAlias
+
+MyStringDict: TypeAlias = Dict[str, str]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_default_with_const.py 0.45.0-1/tests/data/expected/main/jsonschema/use_default_with_const.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_default_with_const.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/use_default_with_const.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  use_default_with_const.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal
+
+from pydantic import BaseModel
+
+
+class UseDefaultWithConst(BaseModel):
+    foo: Literal['foo'] = 'foo'
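(What the use-default-with-const expectation buys, assuming pydantic v2: the const field can be omitted at construction yet still only accepts its literal value:)

from typing import Literal

from pydantic import BaseModel, ValidationError

class UseDefaultWithConst(BaseModel):
    foo: Literal['foo'] = 'foo'

assert UseDefaultWithConst().foo == 'foo'
try:
    UseDefaultWithConst(foo='bar')
except ValidationError:
    pass  # anything other than 'foo' is rejected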
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_frozen_field_dataclass.py 0.45.0-1/tests/data/expected/main/jsonschema/use_frozen_field_dataclass.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_frozen_field_dataclass.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/use_frozen_field_dataclass.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  use_frozen_field.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Optional
+
+
+@dataclass
+class User:
+    id: int
+    name: str
+    password: str
+    created_at: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_frozen_field_no_readonly.py 0.45.0-1/tests/data/expected/main/jsonschema/use_frozen_field_no_readonly.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_frozen_field_no_readonly.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/use_frozen_field_no_readonly.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  user.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, List, Optional
+
+from pydantic import BaseModel, Field, RootModel
+
+
+class Model(RootModel[Any]):
+    root: Any
+
+
+class User(BaseModel):
+    name: Optional[str] = Field(None, examples=['ken'])
+    pets: List[User] = Field(default_factory=list)
+
+
+class Pet(BaseModel):
+    name: Optional[str] = Field(None, examples=['dog', 'cat'])
+
+
+User.model_rebuild()
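(On the trailing model_rebuild() above: User refers to itself via List[User], and under `from __future__ import annotations` every annotation is a string, so the generated module re-resolves the schema eagerly once the class body is complete. A minimal sketch, assuming pydantic v2:)

from __future__ import annotations

from typing import List

from pydantic import BaseModel, Field

class Node(BaseModel):
    children: List[Node] = Field(default_factory=list)

Node.model_rebuild()  # resolve the deferred 'Node' reference now
print(Node(children=[Node()]))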
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_frozen_field_v1.py 0.45.0-1/tests/data/expected/main/jsonschema/use_frozen_field_v1.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_frozen_field_v1.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/use_frozen_field_v1.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  use_frozen_field.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from datetime import datetime
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class User(BaseModel):
+    class Config:
+        validate_assignment = True
+
+    id: int = Field(..., allow_mutation=False, description='Server-generated ID')
+    name: str
+    password: str = Field(..., description='User password')
+    created_at: Optional[datetime] = Field(None, allow_mutation=False)
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_frozen_field_v2.py 0.45.0-1/tests/data/expected/main/jsonschema/use_frozen_field_v2.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_frozen_field_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/use_frozen_field_v2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  use_frozen_field.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import AwareDatetime, BaseModel, Field
+
+
+class User(BaseModel):
+    id: int = Field(..., description='Server-generated ID', frozen=True)
+    name: str
+    password: str = Field(..., description='User password')
+    created_at: Optional[AwareDatetime] = Field(None, frozen=True)
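(The v1/v2 pair above shows the two spellings of a read-only field: v1 needs allow_mutation=False plus validate_assignment, while v2's frozen=True is enforced unconditionally. A quick check of the v2 behavior, pydantic v2 assumed:)

from pydantic import BaseModel, Field, ValidationError

class User(BaseModel):
    id: int = Field(..., frozen=True)
    name: str

u = User(id=1, name='a')
u.name = 'b'  # ordinary field: assignment succeeds
try:
    u.id = 2
except ValidationError:
    pass  # frozen field rejects assignment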
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/_internal.py 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/_internal.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/_internal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/_internal.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,37 @@
+# generated by datamodel-codegen:
+#   filename:  _internal
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List
+
+from pydantic import BaseModel, Field, conint
+
+from .definitions import food as food_1
+from .definitions import friends as friends_1
+from .definitions.drink import coffee, tea
+from .definitions.relative.animal.pet import pet as pet_1
+from .definitions.relative.animal.pet.pet import Pet
+
+
+class Robot(Pet):
+    friends: Person | None = None
+    drink: coffee.Coffee | None = None
+    food: food_1.Noodle | None = None
+    pet: pet_1.Pet | None = None
+
+
+class Person(BaseModel):
+    first_name: str = Field(..., description="The person's first name.")
+    last_name: str = Field(..., description="The person's last name.")
+    age: conint(ge=0) | None = Field(None, description='Age in years.')
+    pets: List[pet_1.Pet] | None = None
+    friends: friends_1.Friends | None = None
+    robot: Robot | None = None
+    comment: None = None
+    drink: List[coffee.Coffee | tea.Tea] | None = None
+    food: List[food_1.Noodle | food_1.Soup] | None = None
+
+
+Person.update_forward_refs()
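(A note on the union-operator output above: since every module starts with `from __future__ import annotations`, the `X | None` annotations are stored as strings rather than evaluated at import time. Plain functions and dataclasses therefore import fine on Python 3.9; pydantic, which does evaluate annotations to build validators, still needs a runtime or backport that understands `|`. A minimal illustration:)

from __future__ import annotations

def greet(name: str | None = None) -> str:  # annotation never evaluated
    return f'hi, {name or "there"}'

print(greet.__annotations__['name'])  # the literal string 'str | None'
print(greet())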
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/drink/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/drink/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/drink/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/drink/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/drink/coffee.py 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/drink/coffee.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/drink/coffee.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/drink/coffee.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  definitions/drink/coffee.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class Coffee(Enum):
+    Black = 'Black'
+    Espresso = 'Espresso'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/drink/tea.py 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/drink/tea.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/drink/tea.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/drink/tea.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  definitions/drink/tea.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class Tea(Enum):
+    Oolong = 'Oolong'
+    Green = 'Green'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/food.py 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/food.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/food.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/food.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  definitions/food.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class Noodle(Enum):
+    ramen = 'ramen'
+    spaghetti = 'spaghetti'
+
+
+class Soup(Enum):
+    bean = 'bean'
+    mushroom = 'mushroom'
+    tomato = 'tomato'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/friends.py 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/friends.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/friends.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/friends.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+# generated by datamodel-codegen:
+#   filename:  definitions/friends.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List
+
+from pydantic import BaseModel, Extra, Field
+
+from . import food as food_1
+
+
+class Friend(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    name: str = Field(..., example='John Doe')
+    phone_number: str | None = Field(None, example='(555) 555-1234')
+    food: List[food_1.Noodle | food_1.Soup] | None = None
+
+
+class Friends(BaseModel):
+    __root__: List[Friend] = Field(..., title='Friends')
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/machine/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/machine/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/machine/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/machine/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/machine/robot.py 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/machine/robot.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/machine/robot.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/machine/robot.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from ..._internal import Robot
+
+__all__ = ["Robot"]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/fur.py 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/fur.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/fur.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/fur.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  definitions/relative/animal/fur.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class Fur(Enum):
+    Short_hair = 'Short hair'
+    Long_hair = 'Long hair'
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/pet/__init__.py 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/pet/__init__.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/pet/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/pet/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/pet/pet.py 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/pet/pet.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/pet/pet.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/definitions/relative/animal/pet/pet.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  definitions/relative/animal/pet/pet.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+from .. import fur as fur_1
+
+
+class Pet(BaseModel):
+    name: str | None = None
+    age: int | None = None
+    fur: fur_1.Fur | None = None
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/person.py 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/person.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/use_union_operator/person.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/use_union_operator/person.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  external_files_in_directory
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from ._internal import Person
+
+__all__ = ["Person"]
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/with_anchor.py 0.45.0-1/tests/data/expected/main/jsonschema/with_anchor.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/with_anchor.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/with_anchor.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  with_anchor.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class Address(BaseModel):
+    street: str
+
+
+class Model(BaseModel):
+    billing_address: Address
diff -pruN 0.26.4-3/tests/data/expected/main/jsonschema/without_titles_use_title_as_name.py 0.45.0-1/tests/data/expected/main/jsonschema/without_titles_use_title_as_name.py
--- 0.26.4-3/tests/data/expected/main/jsonschema/without_titles_use_title_as_name.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/jsonschema/without_titles_use_title_as_name.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,53 @@
+# generated by datamodel-codegen:
+#   filename:  without_titles.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional, Union
+
+from pydantic import BaseModel
+
+
+class ProcessingStatus(Enum):
+    COMPLETED = 'COMPLETED'
+    PENDING = 'PENDING'
+    FAILED = 'FAILED'
+
+
+class Kind(BaseModel):
+    __root__: str
+
+
+class ExtendedProcessingTask1(BaseModel):
+    comment: Optional[str] = None
+
+
+class ProcessingStatusUnion(BaseModel):
+    id: Optional[int] = None
+    description: Optional[str] = None
+
+
+class Model(BaseModel):
+    __root__: List[ProcessingTask]
+
+
+class ExtendedProcessingTask(BaseModel):
+    __root__: Union[Model, ExtendedProcessingTask1]
+
+
+class ExtendedProcessingTasks(BaseModel):
+    __root__: List[ExtendedProcessingTask]
+
+
+class ProcessingTask(BaseModel):
+    processing_status_union: Optional[
+        Union[ProcessingStatusUnion, ExtendedProcessingTask, ProcessingStatus]
+    ] = 'COMPLETED'
+    processing_status: Optional[ProcessingStatus] = 'COMPLETED'
+    name: Optional[str] = None
+    kind: Optional[Kind] = None
+
+
+Model.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/main_jsonschema_with_custom_formatters/output.py 0.45.0-1/tests/data/expected/main/main_jsonschema_with_custom_formatters/output.py
--- 0.26.4-3/tests/data/expected/main/main_jsonschema_with_custom_formatters/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/main_jsonschema_with_custom_formatters/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,23 @@
+# generated by datamodel-codegen:
+#   filename:  person.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+# MIT License
+# 
+# Copyright (c) 2023 Blah-blah
+# 
+from __future__ import annotations
+
+from typing import Any, List, Optional
+
+from pydantic import BaseModel, Field, conint
+
+
+class Person(BaseModel):
+    firstName: Optional[str] = Field(None, description="The person's first name.")
+    lastName: Optional[str] = Field(None, description="The person's last name.")
+    age: Optional[conint(ge=0)] = Field(
+        None, description='Age in years which must be equal to or greater than zero.'
+    )
+    friends: Optional[List[Any]] = None
+    comment: None = None
diff -pruN 0.26.4-3/tests/data/expected/main/no_use_specialized_enum.py 0.45.0-1/tests/data/expected/main/no_use_specialized_enum.py
--- 0.26.4-3/tests/data/expected/main/no_use_specialized_enum.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/no_use_specialized_enum.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  string_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class Model(Enum):
+    A = 'A'
+    B = 'B'
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/additional_properties.py 0.45.0-1/tests/data/expected/main/openapi/additional_properties.py
--- 0.26.4-3/tests/data/expected/main/openapi/additional_properties.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/additional_properties.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,110 @@
+# generated by datamodel-codegen:
+#   filename:  additional_properties.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Dict, List, Optional
+
+from pydantic import BaseModel, Extra, Field
+
+
+class Pet(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    __root__: str
+
+
+class Rules(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    code: int
+    message: str
+
+
+class Event(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    event: Optional[Event] = None
+
+
+class Broken(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    foo: Optional[str] = None
+    bar: Optional[int] = None
+
+
+class BrokenArray(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    broken: Optional[Dict[str, List[Broken]]] = None
+
+
+class FileSetUpload(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    task_id: Optional[str] = Field(None, title='task id')
+    tags: Dict[str, List[str]] = Field(
+        ..., title='Dict of tags, each containing a list of file names'
+    )
+
+
+class Test(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    broken: Optional[Dict[str, Broken]] = None
+    failing: Optional[Dict[str, str]] = {}
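(Every model above carries Extra.forbid because the source schema sets additionalProperties: false. The practical effect, assuming pydantic v1 (v2 still re-exports Extra for compatibility): unknown keys raise instead of being silently dropped:)

from pydantic import BaseModel, Extra, ValidationError

class Pet(BaseModel):
    class Config:
        extra = Extra.forbid

    id: int
    name: str

try:
    Pet(id=1, name='rex', color='brown')
except ValidationError:
    pass  # unexpected 'color' is rejected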
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/all_exports_no_child/__init__.py 0.45.0-1/tests/data/expected/main/openapi/all_exports_no_child/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/all_exports_no_child/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/all_exports_no_child/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+# generated by datamodel-codegen:
+#   filename:  all_exports_no_child.yaml
+
+from __future__ import annotations
+
+from .parent import ParentModel
+
+__all__ = [
+    "ParentModel",
+]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/all_exports_no_child/parent/__init__.py 0.45.0-1/tests/data/expected/main/openapi/all_exports_no_child/parent/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/all_exports_no_child/parent/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/all_exports_no_child/parent/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  all_exports_no_child.yaml
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class ParentModel(BaseModel):
+    id: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/all_exports_no_child/parent/child/__init__.py 0.45.0-1/tests/data/expected/main/openapi/all_exports_no_child/parent/child/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/all_exports_no_child/parent/child/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/all_exports_no_child/parent/child/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,2 @@
+# generated by datamodel-codegen:
+#   filename:  all_exports_no_child.yaml
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/all_exports_no_child/parent/child/deep.py 0.45.0-1/tests/data/expected/main/openapi/all_exports_no_child/parent/child/deep.py
--- 0.26.4-3/tests/data/expected/main/openapi/all_exports_no_child/parent/child/deep.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/all_exports_no_child/parent/child/deep.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  all_exports_no_child.yaml
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class DeepModel(BaseModel):
+    name: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/all_exports_with_local_models/__init__.py 0.45.0-1/tests/data/expected/main/openapi/all_exports_with_local_models/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/all_exports_with_local_models/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/all_exports_with_local_models/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  all_exports_with_local_models.yaml
+
+from __future__ import annotations
+
+from .pkg import AnotherLocal, LocalModel
+
+__all__ = [
+    "AnotherLocal",
+    "LocalModel",
+]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/all_exports_with_local_models/pkg/__init__.py 0.45.0-1/tests/data/expected/main/openapi/all_exports_with_local_models/pkg/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/all_exports_with_local_models/pkg/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/all_exports_with_local_models/pkg/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+# generated by datamodel-codegen:
+#   filename:  all_exports_with_local_models.yaml
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .sub import ChildModel
+
+__all__ = [
+    "AnotherLocal",
+    "ChildModel",
+    "LocalModel",
+]
+
+
+class LocalModel(BaseModel):
+    id: Optional[str] = None
+
+
+class AnotherLocal(BaseModel):
+    name: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/all_exports_with_local_models/pkg/sub.py 0.45.0-1/tests/data/expected/main/openapi/all_exports_with_local_models/pkg/sub.py
--- 0.26.4-3/tests/data/expected/main/openapi/all_exports_with_local_models/pkg/sub.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/all_exports_with_local_models/pkg/sub.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  all_exports_with_local_models.yaml
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class ChildModel(BaseModel):
+    value: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/all_of_with_relative_ref.py 0.45.0-1/tests/data/expected/main/openapi/all_of_with_relative_ref.py
--- 0.26.4-3/tests/data/expected/main/openapi/all_of_with_relative_ref.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/all_of_with_relative_ref.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,43 @@
+# generated by datamodel-codegen:
+#   filename:  openapi.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict, RootModel
+
+
+class Animal(BaseModel):
+    model_config = ConfigDict(
+        extra='forbid',
+    )
+    kind: Optional[Kind] = None
+    """
+    The kind of the animal
+    """
+
+
+class Animals(RootModel[Animal]):
+    root: Animal
+
+
+class Kind(Enum):
+    CAT = 'CAT'
+    DOG = 'DOG'
+
+
+class Pet(BaseModel):
+    model_config = ConfigDict(
+        extra='forbid',
+    )
+    kind: Optional[Kind] = None
+    """
+    The kind of the pet
+    """
+
+
+class Pets(RootModel[Pet]):
+    root: Pet
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allof_enum_ref.py 0.45.0-1/tests/data/expected/main/openapi/allof_enum_ref.py
--- 0.26.4-3/tests/data/expected/main/openapi/allof_enum_ref.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allof_enum_ref.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+# generated by datamodel-codegen:
+#   filename:  allof_enum_ref.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class MassUnit(Enum):
+    g = 'g'
+    kg = 'kg'
+    t = 't'
+
+
+class QuantityTrunc(BaseModel):
+    __root__: MassUnit
+
+
+class CreateOrderByEstimateRequest(BaseModel):
+    quantity_trunc: Optional[QuantityTrunc] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allof_materialize_defaults.py 0.45.0-1/tests/data/expected/main/openapi/allof_materialize_defaults.py
--- 0.26.4-3/tests/data/expected/main/openapi/allof_materialize_defaults.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allof_materialize_defaults.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  allof_materialize_defaults.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, conint, constr
+
+
+class Parent(BaseModel):
+    name: Optional[constr(min_length=1)] = 'parent_default'
+    count: Optional[conint(ge=0)] = 10
+
+
+class Child(Parent):
+    name: Optional[constr(min_length=1, max_length=100)] = 'parent_default'
+    count: Optional[conint(ge=0, le=1000)] = 10
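(The materialize-defaults expectation above restates the parent's defaults on the child while merging the tighter allOf constraints, so each class validates stand-alone. A reduced sketch, pydantic v1-style constrained types assumed:)

from typing import Optional

from pydantic import BaseModel, conint

class Parent(BaseModel):
    count: Optional[conint(ge=0)] = 10

class Child(Parent):
    count: Optional[conint(ge=0, le=1000)] = 10  # merged bounds, same default

assert Child().count == 10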
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allof_merge_mode_none.py 0.45.0-1/tests/data/expected/main/openapi/allof_merge_mode_none.py
--- 0.26.4-3/tests/data/expected/main/openapi/allof_merge_mode_none.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allof_merge_mode_none.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  allof_merge_mode_none.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, constr
+
+
+class Parent(BaseModel):
+    name: Optional[constr(min_length=1)] = 'parent_default'
+
+
+class Child(Parent):
+    name: Optional[constr(min_length=1)] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allof_multiple_parents_same_property.py 0.45.0-1/tests/data/expected/main/openapi/allof_multiple_parents_same_property.py
--- 0.26.4-3/tests/data/expected/main/openapi/allof_multiple_parents_same_property.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allof_multiple_parents_same_property.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,23 @@
+# generated by datamodel-codegen:
+#   filename:  allof_multiple_parents_same_property.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, conint, constr
+
+
+class Parent1(BaseModel):
+    name: Optional[constr(min_length=1)] = None
+
+
+class Parent2(BaseModel):
+    name: Optional[constr(min_length=5)] = None
+    age: Optional[conint(ge=0)] = None
+
+
+class Child(BaseModel):
+    name: Optional[constr(min_length=5, max_length=100)] = None
+    age: Optional[conint(ge=0, le=150)] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allof_parent_bool_property.py 0.45.0-1/tests/data/expected/main/openapi/allof_parent_bool_property.py
--- 0.26.4-3/tests/data/expected/main/openapi/allof_parent_bool_property.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allof_parent_bool_property.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  allof_parent_bool_property.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Optional
+
+from pydantic import BaseModel, constr
+
+
+class Parent(BaseModel):
+    name: Optional[constr(min_length=1)] = None
+    allowed: Optional[Any] = None
+
+
+class Child(Parent):
+    name: Optional[constr(min_length=1, max_length=100)] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allof_parent_no_properties.py 0.45.0-1/tests/data/expected/main/openapi/allof_parent_no_properties.py
--- 0.26.4-3/tests/data/expected/main/openapi/allof_parent_no_properties.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allof_parent_no_properties.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  allof_parent_no_properties.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, constr
+
+
+class EmptyParent(BaseModel):
+    pass
+
+
+class Child(EmptyParent):
+    name: Optional[constr(max_length=100)] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allof_partial_override_array_items.py 0.45.0-1/tests/data/expected/main/openapi/allof_partial_override_array_items.py
--- 0.26.4-3/tests/data/expected/main/openapi/allof_partial_override_array_items.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allof_partial_override_array_items.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  allof_partial_override_array_items.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class Thing(BaseModel):
+    type: Optional[str] = 'playground:Thing'
+    type_list: Optional[List[str]] = ['playground:Thing']
+
+
+class Person(Thing):
+    type: Optional[str] = 'playground:Person'
+    type_list: Optional[List[str]] = ['playground:Person']
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allof_partial_override_array_items_no_parent.py 0.45.0-1/tests/data/expected/main/openapi/allof_partial_override_array_items_no_parent.py
--- 0.26.4-3/tests/data/expected/main/openapi/allof_partial_override_array_items_no_parent.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allof_partial_override_array_items_no_parent.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  allof_partial_override_array_items_no_parent.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, List, Optional
+
+from pydantic import BaseModel
+
+
+class Thing(BaseModel):
+    name: Optional[str] = None
+
+
+class Person(Thing):
+    tags: Optional[List[Any]] = ['tag1']
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allof_partial_override_deeply_nested_array.py 0.45.0-1/tests/data/expected/main/openapi/allof_partial_override_deeply_nested_array.py
--- 0.26.4-3/tests/data/expected/main/openapi/allof_partial_override_deeply_nested_array.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allof_partial_override_deeply_nested_array.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  allof_partial_override_deeply_nested_array.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class Thing(BaseModel):
+    cube: Optional[List[List[List[str]]]] = [[['a']]]
+
+
+class Person(Thing):
+    cube: Optional[List[List[List[str]]]] = [[['b']]]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allof_partial_override_inherited_types.py 0.45.0-1/tests/data/expected/main/openapi/allof_partial_override_inherited_types.py
--- 0.26.4-3/tests/data/expected/main/openapi/allof_partial_override_inherited_types.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allof_partial_override_inherited_types.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  allof_partial_override_inherited_types.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class Thing(BaseModel):
+    type: Optional[str] = None
+    name: Optional[str] = Field(None, description='First and Last name')
+
+
+class Person(Thing):
+    type: Optional[str] = 'playground:Person'
+    name: str = Field(..., description='First and Last name')
+    age: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allof_partial_override_nested_array_items.py 0.45.0-1/tests/data/expected/main/openapi/allof_partial_override_nested_array_items.py
--- 0.26.4-3/tests/data/expected/main/openapi/allof_partial_override_nested_array_items.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allof_partial_override_nested_array_items.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  allof_partial_override_nested_array_items.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class Thing(BaseModel):
+    matrix: Optional[List[List[str]]] = [['a', 'b']]
+
+
+class Person(Thing):
+    matrix: Optional[List[List[str]]] = [['c', 'd']]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allof_partial_override_non_array_field.py 0.45.0-1/tests/data/expected/main/openapi/allof_partial_override_non_array_field.py
--- 0.26.4-3/tests/data/expected/main/openapi/allof_partial_override_non_array_field.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allof_partial_override_non_array_field.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  allof_partial_override_non_array_field.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class Thing(BaseModel):
+    name: Optional[str] = 'default_name'
+    count: Optional[int] = 0
+
+
+class Person(Thing):
+    name: Optional[str] = Field(None, title='Person name')
+    count: Optional[int] = Field(None, description='Count value')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allof_partial_override_simple_list_any.py 0.45.0-1/tests/data/expected/main/openapi/allof_partial_override_simple_list_any.py
--- 0.26.4-3/tests/data/expected/main/openapi/allof_partial_override_simple_list_any.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allof_partial_override_simple_list_any.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  allof_partial_override_simple_list_any.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class Parent(BaseModel):
+    items: Optional[List[str]] = None
+
+
+class Child(Parent):
+    items: Optional[List[str]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allof_partial_override_unique_items.py 0.45.0-1/tests/data/expected/main/openapi/allof_partial_override_unique_items.py
--- 0.26.4-3/tests/data/expected/main/openapi/allof_partial_override_unique_items.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allof_partial_override_unique_items.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  allof_partial_override_unique_items.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Set
+
+from pydantic import BaseModel, Field
+
+
+class Thing(BaseModel):
+    tags: Optional[Set[str]] = Field(None, unique_items=True)
+
+
+class Person(Thing):
+    tags: Optional[Set[str]] = Field({'tag2'}, unique_items=True)
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allof_partial_override_unique_items_pydantic_v2.py 0.45.0-1/tests/data/expected/main/openapi/allof_partial_override_unique_items_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/allof_partial_override_unique_items_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allof_partial_override_unique_items_pydantic_v2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  allof_partial_override_unique_items.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Set
+
+from pydantic import BaseModel
+
+
+class Thing(BaseModel):
+    tags: Optional[Set[str]] = None
+
+
+class Person(Thing):
+    tags: Optional[Set[str]] = {'tag2'}
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allof_property_bool_schema.py 0.45.0-1/tests/data/expected/main/openapi/allof_property_bool_schema.py
--- 0.26.4-3/tests/data/expected/main/openapi/allof_property_bool_schema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allof_property_bool_schema.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  allof_property_bool_schema.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Optional
+
+from pydantic import BaseModel, constr
+
+
+class Parent(BaseModel):
+    name: Optional[constr(min_length=1)] = None
+
+
+class Child(Parent):
+    name: Optional[constr(min_length=1, max_length=100)] = None
+    allowed: Optional[Any] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allof_required.py 0.45.0-1/tests/data/expected/main/openapi/allof_required.py
--- 0.26.4-3/tests/data/expected/main/openapi/allof_required.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allof_required.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  allof_required.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, constr
+
+
+class Foo(BaseModel):
+    a: str
+    b: str
+
+
+class Bar(Foo):
+    type: constr(regex=r'service')
+    name: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allof_with_anyof_ref.py 0.45.0-1/tests/data/expected/main/openapi/allof_with_anyof_ref.py
--- 0.26.4-3/tests/data/expected/main/openapi/allof_with_anyof_ref.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allof_with_anyof_ref.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,48 @@
+# generated by datamodel-codegen:
+#   filename:  allof_with_anyof_ref.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Union
+
+from pydantic import BaseModel, RootModel
+
+
+class ItemType(Enum):
+    text = 'text'
+
+
+class TextItem(BaseModel):
+    itemType: ItemType
+    text: str
+
+
+class ItemType1(Enum):
+    number = 'number'
+
+
+class NumberItem(BaseModel):
+    itemType: ItemType1
+    value: int
+
+
+class Item(RootModel[Union[TextItem, NumberItem]]):
+    root: Union[TextItem, NumberItem]
+
+
+class ItemPostRequest1(BaseModel):
+    itemId: str
+
+
+class ItemPostRequest2(TextItem, ItemPostRequest1):
+    pass
+
+
+class ItemPostRequest3(NumberItem, ItemPostRequest1):
+    pass
+
+
+class ItemPostRequest(RootModel[Union[ItemPostRequest2, ItemPostRequest3]]):
+    root: Union[ItemPostRequest2, ItemPostRequest3]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allof_with_oneof_ref.py 0.45.0-1/tests/data/expected/main/openapi/allof_with_oneof_ref.py
--- 0.26.4-3/tests/data/expected/main/openapi/allof_with_oneof_ref.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allof_with_oneof_ref.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,48 @@
+# generated by datamodel-codegen:
+#   filename:  allof_with_oneof_ref.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Literal, Union
+
+from pydantic import BaseModel, Field, RootModel
+
+
+class UserType(Enum):
+    admin = 'admin'
+
+
+class AdminUser(BaseModel):
+    userType: Literal['admin']
+    adminLevel: int
+
+
+class UserType1(Enum):
+    regular = 'regular'
+
+
+class RegularUser(BaseModel):
+    userType: Literal['regular']
+    username: str
+
+
+class User(RootModel[Union[AdminUser, RegularUser]]):
+    root: Union[AdminUser, RegularUser] = Field(..., discriminator='userType')
+
+
+class UserPostRequest1(BaseModel):
+    userId: str
+
+
+class UserPostRequest2(AdminUser, UserPostRequest1):
+    pass
+
+
+class UserPostRequest3(RegularUser, UserPostRequest1):
+    pass
+
+
+class UserPostRequest(RootModel[Union[UserPostRequest2, UserPostRequest3]]):
+    root: Union[UserPostRequest2, UserPostRequest3]
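(The oneOf variant above differs from the preceding anyOf one in a single detail: User's root union carries discriminator='userType', so the literal tag selects a branch before full validation runs. A quick check with the same shapes, pydantic v2 assumed:)

from typing import Literal, Union

from pydantic import BaseModel, Field, RootModel

class AdminUser(BaseModel):
    userType: Literal['admin']
    adminLevel: int

class RegularUser(BaseModel):
    userType: Literal['regular']
    username: str

class User(RootModel[Union[AdminUser, RegularUser]]):
    root: Union[AdminUser, RegularUser] = Field(..., discriminator='userType')

u = User.model_validate({'userType': 'admin', 'adminLevel': 3})
assert isinstance(u.root, AdminUser)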
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allof_with_required_inherited_complex_allof.py 0.45.0-1/tests/data/expected/main/openapi/allof_with_required_inherited_complex_allof.py
--- 0.26.4-3/tests/data/expected/main/openapi/allof_with_required_inherited_complex_allof.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allof_with_required_inherited_complex_allof.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,55 @@
+# generated by datamodel-codegen:
+#   filename:  allof_with_required_inherited_complex_allof.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Dict, Optional
+
+from pydantic import BaseModel, confloat, constr
+
+
+class StringConstraint(BaseModel):
+    __root__: constr(min_length=1)
+
+
+class NumberConstraint(BaseModel):
+    __root__: confloat(ge=0.0)
+
+
+class BaseConfig(BaseModel):
+    name: Optional[str] = None
+    enabled: Optional[bool] = None
+
+
+class ExtendedConfig(BaseModel):
+    timeout: Optional[int] = None
+
+
+class Score(BaseModel):
+    pass
+
+
+class Config(BaseConfig, ExtendedConfig):
+    pass
+
+
+class Metadata(BaseModel):
+    pass
+
+
+class ProjectedItem(BaseModel):
+    id: Optional[int] = None
+    code: Optional[StringConstraint] = None
+    score: Optional[Score] = None
+    config: Optional[Config] = None
+    metadata: Optional[Metadata] = None
+
+
+class Item(ProjectedItem):
+    extra: Optional[str] = None
+    id: int
+    code: StringConstraint
+    score: confloat(ge=0.0, le=100.0)
+    config: BaseConfig
+    metadata: Dict[str, str]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allof_with_required_inherited_comprehensive.py 0.45.0-1/tests/data/expected/main/openapi/allof_with_required_inherited_comprehensive.py
--- 0.26.4-3/tests/data/expected/main/openapi/allof_with_required_inherited_comprehensive.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allof_with_required_inherited_comprehensive.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,92 @@
+# generated by datamodel-codegen:
+#   filename:  allof_with_required_inherited_comprehensive.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, Dict, List, Optional, Union
+
+from pydantic import BaseModel, constr
+
+
+class Status(Enum):
+    active = 'active'
+    inactive = 'inactive'
+
+
+class BaseType(BaseModel):
+    id: Optional[int] = None
+
+
+class ObjectWithProps(BaseModel):
+    nested: Optional[str] = None
+
+
+class AllofPrimitivesWithConstraints(BaseModel):
+    pass
+
+
+class AllofWithPattern(BaseModel):
+    pass
+
+
+class AllofWithUnique(BaseModel):
+    pass
+
+
+class Level2(BaseModel):
+    level3: Optional[str] = None
+
+
+class Level1(BaseModel):
+    level2: Optional[Level2] = None
+
+
+class DeepNested(BaseModel):
+    level1: Optional[Level1] = None
+
+
+class ProjectedEntity(BaseModel):
+    primitive_string: Optional[str] = None
+    primitive_int: Optional[int] = None
+    primitive_number: Optional[float] = None
+    primitive_bool: Optional[bool] = None
+    ref_field: Optional[BaseType] = None
+    enum_field: Optional[Status] = None
+    array_with_ref: Optional[List[BaseType]] = None
+    array_with_primitive: Optional[List[str]] = None
+    object_with_props: Optional[ObjectWithProps] = None
+    object_with_additional: Optional[Dict[str, int]] = None
+    anyof_field: Optional[Union[str, int]] = None
+    oneof_field: Optional[Union[bool, float]] = None
+    allof_single_ref: Optional[BaseType] = None
+    allof_multiple_refs: Optional[BaseType] = None
+    allof_primitives_with_constraints: Optional[AllofPrimitivesWithConstraints] = None
+    allof_with_pattern: Optional[AllofWithPattern] = None
+    allof_with_unique: Optional[AllofWithUnique] = None
+    type_list: Optional[str] = None
+    deep_nested: Optional[DeepNested] = None
+
+
+class Entity(ProjectedEntity):
+    extra: Optional[str] = None
+    primitive_string: str
+    primitive_int: int
+    primitive_number: float
+    primitive_bool: bool
+    ref_field: BaseType
+    enum_field: Status
+    array_with_ref: List[BaseType]
+    array_with_primitive: List[str]
+    object_with_props: Dict[str, Any]
+    object_with_additional: Dict[str, int]
+    anyof_field: Union[str, int]
+    oneof_field: Union[bool, float]
+    allof_single_ref: BaseType
+    allof_multiple_refs: BaseType
+    allof_primitives_with_constraints: constr(min_length=5, max_length=100)
+    allof_with_pattern: constr(regex=r'(?=^[a-z]+)(?=[0-9]$)')
+    allof_with_unique: List[str]
+    type_list: Optional[str]
+    deep_nested: Dict[str, Any]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allof_with_required_inherited_coverage.py 0.45.0-1/tests/data/expected/main/openapi/allof_with_required_inherited_coverage.py
--- 0.26.4-3/tests/data/expected/main/openapi/allof_with_required_inherited_coverage.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allof_with_required_inherited_coverage.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,172 @@
+# generated by datamodel-codegen:
+#   filename:  allof_with_required_inherited_coverage.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Dict, List, Optional, Union
+
+from pydantic import BaseModel, conint
+
+
+class ConstraintOnlyBase(BaseModel):
+    field_with_constraints: Optional[Any] = None
+
+
+class MultipleField(BaseModel):
+    pass
+
+
+class MultipleOfBase(BaseModel):
+    multiple_field: Optional[MultipleField] = None
+
+
+class SimpleString(BaseModel):
+    __root__: str
+
+
+class NestedAnyOfWithRef(BaseModel):
+    nested_anyof: Optional[Union[SimpleString, int]] = None
+
+
+class L4(BaseModel):
+    l5: Optional[str] = None
+
+
+class L3(BaseModel):
+    l4: Optional[L4] = None
+
+
+class L2(BaseModel):
+    l3: Optional[L3] = None
+
+
+class L1(BaseModel):
+    l2: Optional[L2] = None
+
+
+class DeepArrayItem(BaseModel):
+    l1: Optional[L1] = None
+
+
+class DeepNestedArray(BaseModel):
+    deep_array: Optional[List[DeepArrayItem]] = None
+
+
+class OneOfBase(BaseModel):
+    oneof_field: Optional[Union[str, int]] = None
+
+
+class SingleOneOf(BaseModel):
+    single_oneof: Optional[str] = None
+
+
+class L31(BaseModel):
+    l4: Optional[str] = None
+
+
+class L21(BaseModel):
+    l3: Optional[L31] = None
+
+
+class L11(BaseModel):
+    l2: Optional[L21] = None
+
+
+class DeepDict(BaseModel):
+    l1: Optional[L11] = None
+
+
+class AdditionalPropsDeep(BaseModel):
+    deep_dict: Optional[Dict[str, DeepDict]] = None
+
+
+class ObjectAllof(BaseModel):
+    a: Optional[str] = None
+    b: Optional[int] = None
+
+
+class ObjectOnlyAllOf(BaseModel):
+    object_allof: Optional[ObjectAllof] = None
+
+
+class LargeUnion(BaseModel):
+    large_union: Optional[Union[str, int, bool, float, List[str], Dict[str, Any]]] = (
+        None
+    )
+
+
+class BaseWithNoProperties(BaseModel):
+    pass
+
+
+class BaseWithBooleanProperty(BaseModel):
+    bool_prop: Optional[bool] = None
+
+
+class ChildOfNoProps(BaseWithNoProperties):
+    extra: Optional[str] = None
+    some_field: Any
+
+
+class NestedAllOfRef(BaseModel):
+    nested_allof_ref: Optional[SimpleString] = None
+
+
+class NestedIndirect1(BaseModel):
+    pass
+
+
+class NestedIndirect2(NestedIndirect1):
+    pass
+
+
+class NestedIndirect(BaseModel):
+    __root__: NestedIndirect2
+
+
+class NestedAllOfWithoutDirectRef(BaseModel):
+    nested_indirect: Optional[NestedIndirect] = None
+
+
+class EnumField(BaseModel):
+    pass
+
+
+class EnumInAllOf(BaseModel):
+    enum_field: Optional[EnumField] = None
+
+
+class RefWithPrimitiveBase(BaseModel):
+    ref_and_primitive: Optional[SimpleString] = None
+
+
+class EdgeCasesCoverage(
+    ConstraintOnlyBase,
+    MultipleOfBase,
+    RefWithPrimitiveBase,
+    NestedAnyOfWithRef,
+    DeepNestedArray,
+    OneOfBase,
+    SingleOneOf,
+    AdditionalPropsDeep,
+    ObjectOnlyAllOf,
+    LargeUnion,
+    NestedAllOfRef,
+    NestedAllOfWithoutDirectRef,
+    EnumInAllOf,
+):
+    local_field: Optional[str] = None
+    field_with_constraints: Any
+    multiple_field: conint(multiple_of=5)
+    ref_and_primitive: SimpleString
+    nested_anyof: Union[SimpleString, int]
+    deep_array: List[Dict[str, Any]]
+    oneof_field: Union[str, int]
+    single_oneof: str
+    deep_dict: Dict[str, Dict[str, Any]]
+    object_allof: Dict[str, Any]
+    large_union: Any
+    nested_allof_ref: SimpleString
+    nested_indirect: Any
+    enum_field: Any
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allof_with_required_inherited_edge_cases.py 0.45.0-1/tests/data/expected/main/openapi/allof_with_required_inherited_edge_cases.py
--- 0.26.4-3/tests/data/expected/main/openapi/allof_with_required_inherited_edge_cases.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allof_with_required_inherited_edge_cases.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,98 @@
+# generated by datamodel-codegen:
+#   filename:  allof_with_required_inherited_edge_cases.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, Dict, List, Optional, Union
+
+from pydantic import BaseModel, conint
+
+
+class BaseRef(BaseModel):
+    id: Optional[int] = None
+
+
+class StatusEnum(Enum):
+    active = 'active'
+    inactive = 'inactive'
+
+
+class SingleAllofPrimitive(BaseModel):
+    pass
+
+
+class AllofNestedAnyof(BaseModel):
+    pass
+
+
+class AllofConstraintOnly(BaseModel):
+    pass
+
+
+class AllofMaxConstraints(BaseModel):
+    pass
+
+
+class AllofUniqueItems(BaseModel):
+    pass
+
+
+class ObjectWithoutAdditional(BaseModel):
+    nested: Optional[str] = None
+
+
+class MultipleAdditionalProps(BaseModel):
+    pass
+
+
+class L3(BaseModel):
+    l4: Optional[str] = None
+
+
+class L2(BaseModel):
+    l3: Optional[L3] = None
+
+
+class L1(BaseModel):
+    l2: Optional[L2] = None
+
+
+class DepthLimitTest(BaseModel):
+    l1: Optional[L1] = None
+
+
+class ProjectedEdgeCases(BaseModel):
+    single_allof_primitive: Optional[SingleAllofPrimitive] = None
+    single_allof_ref: Optional[BaseRef] = None
+    allof_with_enum: Optional[StatusEnum] = None
+    allof_nested_anyof: Optional[AllofNestedAnyof] = None
+    allof_constraint_only: Optional[AllofConstraintOnly] = None
+    allof_max_constraints: Optional[AllofMaxConstraints] = None
+    allof_unique_items: Optional[AllofUniqueItems] = None
+    object_without_additional: Optional[ObjectWithoutAdditional] = None
+    object_only_type: Optional[Dict[str, Any]] = None
+    multiple_additional_props: Optional[MultipleAdditionalProps] = None
+    depth_limit_test: Optional[DepthLimitTest] = None
+    cycle_detection: Optional[BaseRef] = None
+    type_list_field: Optional[Union[str, int]] = None
+    allof_multiple_refs_only: Optional[BaseRef] = None
+
+
+class EdgeCases(ProjectedEdgeCases):
+    extra: Optional[str] = None
+    single_allof_primitive: str
+    single_allof_ref: BaseRef
+    allof_with_enum: StatusEnum
+    allof_nested_anyof: Union[str, int]
+    allof_constraint_only: Any
+    allof_max_constraints: conint(le=50)
+    allof_unique_items: List[str]
+    object_without_additional: Dict[str, Any]
+    object_only_type: Dict[str, Any]
+    multiple_additional_props: Dict[str, BaseRef]
+    depth_limit_test: Dict[str, Any]
+    cycle_detection: BaseRef
+    type_list_field: Union[str, int]
+    allof_multiple_refs_only: BaseRef
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allof_with_required_inherited_fields.py 0.45.0-1/tests/data/expected/main/openapi/allof_with_required_inherited_fields.py
--- 0.26.4-3/tests/data/expected/main/openapi/allof_with_required_inherited_fields.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allof_with_required_inherited_fields.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  allof_with_required_inherited_fields.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class ProjectedPet(BaseModel):
+    id: Optional[int] = None
+    name: Optional[str] = None
+
+
+class Pet(ProjectedPet):
+    tag: Optional[str] = None
+    opts: List[str]
+    id: int
+    name: str
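This fixture shows the allOf-with-required pattern: `ProjectedPet` leaves `id` and `name` optional, and `Pet` re-declares them without defaults so the subclass makes them required again. A quick sketch of the resulting validation behavior (works under both pydantic v1 and v2):

from typing import List, Optional
from pydantic import BaseModel, ValidationError

class ProjectedPet(BaseModel):
    id: Optional[int] = None
    name: Optional[str] = None

class Pet(ProjectedPet):
    tag: Optional[str] = None
    opts: List[str]
    id: int
    name: str

ProjectedPet()  # fine: everything is optional on the projection
try:
    Pet(opts=[])  # id and name are required again on the subclass
except ValidationError as exc:
    assert {'id', 'name'} <= {e['loc'][0] for e in exc.errors()}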
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allof_with_required_inherited_fields_force_optional.py 0.45.0-1/tests/data/expected/main/openapi/allof_with_required_inherited_fields_force_optional.py
--- 0.26.4-3/tests/data/expected/main/openapi/allof_with_required_inherited_fields_force_optional.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allof_with_required_inherited_fields_force_optional.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  allof_with_required_inherited_fields.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class ProjectedPet(BaseModel):
+    id: Optional[int] = None
+    name: Optional[str] = None
+
+
+class Pet(ProjectedPet):
+    tag: Optional[str] = None
+    opts: Optional[List[str]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allof_with_required_inherited_nested_object.py 0.45.0-1/tests/data/expected/main/openapi/allof_with_required_inherited_nested_object.py
--- 0.26.4-3/tests/data/expected/main/openapi/allof_with_required_inherited_nested_object.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allof_with_required_inherited_nested_object.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,40 @@
+# generated by datamodel-codegen:
+#   filename:  allof_with_required_inherited_nested_object.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Dict, List, Optional, Union
+
+from pydantic import BaseModel
+
+
+class Address(BaseModel):
+    street: Optional[str] = None
+    city: Optional[str] = None
+
+
+class ContactInfo(BaseModel):
+    email: Optional[str] = None
+    phone: Optional[str] = None
+
+
+class ProjectedPerson(BaseModel):
+    id: Optional[int] = None
+    name: Optional[str] = None
+    address: Optional[Address] = None
+    metadata: Optional[Dict[str, str]] = None
+    contact: Optional[ContactInfo] = None
+    tags: Optional[List[str]] = None
+    priority: Optional[Union[int, str]] = None
+
+
+class Person(ProjectedPerson):
+    nickname: Optional[str] = None
+    id: int
+    name: str
+    address: Dict[str, Any]
+    metadata: Dict[str, str]
+    contact: ContactInfo
+    tags: List[str]
+    priority: Union[int, str]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allow_extra_fields.py 0.45.0-1/tests/data/expected/main/openapi/allow_extra_fields.py
--- 0.26.4-3/tests/data/expected/main/openapi/allow_extra_fields.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allow_extra_fields.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,102 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Extra, Field
+
+
+class Pet(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    __root__: str
+
+
+class Rules(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    event: Optional[Event] = None
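For reference, `extra = Extra.allow` in these pydantic v1 models keeps unknown keys instead of rejecting them; allowed extras become plain attributes. A minimal sketch (pydantic v1 semantics only):

from pydantic import BaseModel, Extra

class Pet(BaseModel):
    class Config:
        extra = Extra.allow

    id: int
    name: str

pet = Pet(id=1, name='Rex', color='brown')
assert pet.color == 'brown'   # extra key retained as an attribute
assert 'color' in pet.dict()  # and included when dumping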
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allow_extra_fields_pydantic_v2.py 0.45.0-1/tests/data/expected/main/openapi/allow_extra_fields_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/allow_extra_fields_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allow_extra_fields_pydantic_v2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,87 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, ConfigDict, Field, RootModel
+
+
+class Pet(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(RootModel[List[Pet]]):
+    root: List[Pet]
+
+
+class User(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(RootModel[List[User]]):
+    root: List[User]
+
+
+class Id(RootModel[str]):
+    root: str
+
+
+class Rules(RootModel[List[str]]):
+    root: List[str]
+
+
+class Error(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(RootModel[List[Api]]):
+    root: List[Api]
+
+
+class Event(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    model_config = ConfigDict(
+        extra='allow',
+    )
+    event: Optional[Event] = None
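The v2 variant expresses the same schema through `model_config = ConfigDict(extra='allow')`; pydantic v2 additionally collects the unknown keys under `model_extra`. Note the fixture omits the setting on the RootModel wrappers, since v2 root models reject the `extra` configuration. A short sketch:

from pydantic import BaseModel, ConfigDict

class Pet(BaseModel):
    model_config = ConfigDict(extra='allow')

    id: int
    name: str

pet = Pet(id=1, name='Rex', color='brown')
assert pet.model_extra == {'color': 'brown'}
assert pet.model_dump()['color'] == 'brown'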
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allow_population_by_field_name.py 0.45.0-1/tests/data/expected/main/openapi/allow_population_by_field_name.py
--- 0.26.4-3/tests/data/expected/main/openapi/allow_population_by_field_name.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allow_population_by_field_name.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,102 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    class Config:
+        allow_population_by_field_name = True
+
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    class Config:
+        allow_population_by_field_name = True
+
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    class Config:
+        allow_population_by_field_name = True
+
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    class Config:
+        allow_population_by_field_name = True
+
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    class Config:
+        allow_population_by_field_name = True
+
+    __root__: str
+
+
+class Rules(BaseModel):
+    class Config:
+        allow_population_by_field_name = True
+
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    class Config:
+        allow_population_by_field_name = True
+
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    class Config:
+        allow_population_by_field_name = True
+
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    class Config:
+        allow_population_by_field_name = True
+
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    class Config:
+        allow_population_by_field_name = True
+
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    class Config:
+        allow_population_by_field_name = True
+
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/allow_population_by_field_name_pydantic_v2.py 0.45.0-1/tests/data/expected/main/openapi/allow_population_by_field_name_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/allow_population_by_field_name_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/allow_population_by_field_name_pydantic_v2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,102 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, ConfigDict, Field, RootModel
+
+
+class Pet(BaseModel):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(RootModel[List[Pet]]):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    root: List[Pet]
+
+
+class User(BaseModel):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(RootModel[List[User]]):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    root: List[User]
+
+
+class Id(RootModel[str]):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    root: str
+
+
+class Rules(RootModel[List[str]]):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    root: List[str]
+
+
+class Error(BaseModel):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(RootModel[List[Api]]):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    root: List[Api]
+
+
+class Event(BaseModel):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    model_config = ConfigDict(
+        populate_by_name=True,
+    )
+    event: Optional[Event] = None
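`populate_by_name=True` (the v2 rename of v1's `allow_population_by_field_name`) only matters once a field carries an alias; none of the fixture's fields do, but the setting travels with the config so aliased fields behave as in this sketch (the alias here is hypothetical, added for illustration):

from pydantic import BaseModel, ConfigDict, Field

class Api(BaseModel):
    model_config = ConfigDict(populate_by_name=True)

    api_key: str = Field(alias='apiKey')  # hypothetical alias for illustration

# both the alias and the Python field name are accepted on input
assert Api(apiKey='k').api_key == 'k'
assert Api(api_key='k').api_key == 'k'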
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/base_class.py 0.45.0-1/tests/data/expected/main/openapi/base_class.py
--- 0.26.4-3/tests/data/expected/main/openapi/base_class.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/base_class.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,71 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, Field
+
+from custom_module import Base
+
+
+class Pet(Base):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(Base):
+    __root__: List[Pet]
+
+
+class User(Base):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(Base):
+    __root__: List[User]
+
+
+class Id(Base):
+    __root__: str
+
+
+class Rules(Base):
+    __root__: List[str]
+
+
+class Error(Base):
+    code: int
+    message: str
+
+
+class Api(Base):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(Base):
+    __root__: List[Api]
+
+
+class Event(Base):
+    name: Optional[str] = None
+
+
+class Result(Base):
+    event: Optional[Event] = None
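base_class.py assumes a user-supplied `custom_module.Base` (the test suite provides its own; a base-class option such as `--base-class custom_module.Base` drives this output). A stand-in sketch of what such a module could look like; the Config contents are purely hypothetical:

# custom_module.py (hypothetical stand-in for the import above)
from pydantic import BaseModel

class Base(BaseModel):
    class Config:
        # project-wide defaults would live here; this key is illustrative only
        anystr_strip_whitespace = True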
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/body_and_parameters/general.py 0.45.0-1/tests/data/expected/main/openapi/body_and_parameters/general.py
--- 0.26.4-3/tests/data/expected/main/openapi/body_and_parameters/general.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/body_and_parameters/general.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,66 @@
+# generated by datamodel-codegen:
+#   filename:  body_and_parameters.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from datetime import datetime
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class PetForm(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
+
+
+class PetsGetResponse(BaseModel):
+    __root__: List[Pet]
+
+
+class FoodFoodIdGetResponse(BaseModel):
+    __root__: List[int]
+
+
+class UserGetResponse(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UserPostRequest(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UsersGetResponseItem(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UsersGetResponse(BaseModel):
+    __root__: List[UsersGetResponseItem]
+
+
+class UsersPostRequestItem(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UsersPostRequest(BaseModel):
+    __root__: List[UsersPostRequestItem]
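Beyond component schemas, this fixture shows models synthesized from path operations: request and response bodies become `*Request`/`*Response` classes, and JSON-array bodies land in v1 custom-root models. A sketch of feeding a list payload through one of them (pydantic v1 semantics):

from __future__ import annotations

from datetime import datetime
from typing import List, Optional

from pydantic import BaseModel

class UsersPostRequestItem(BaseModel):
    timestamp: datetime
    name: str
    age: Optional[str] = None

class UsersPostRequest(BaseModel):
    __root__: List[UsersPostRequestItem]

# parse_obj wraps a non-dict payload as the root value
body = UsersPostRequest.parse_obj([{'timestamp': '2019-07-26T00:00:00+00:00', 'name': 'a'}])
assert body.__root__[0].name == 'a'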
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/body_and_parameters/only_paths.py 0.45.0-1/tests/data/expected/main/openapi/body_and_parameters/only_paths.py
--- 0.26.4-3/tests/data/expected/main/openapi/body_and_parameters/only_paths.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/body_and_parameters/only_paths.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,66 @@
+# generated by datamodel-codegen:
+#   filename:  body_and_parameters.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from datetime import datetime
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class FoodFoodIdGetResponse(BaseModel):
+    __root__: List[int]
+
+
+class UserGetResponse(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UserPostRequest(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UsersGetResponseItem(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UsersGetResponse(BaseModel):
+    __root__: List[UsersGetResponseItem]
+
+
+class UsersPostRequestItem(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UsersPostRequest(BaseModel):
+    __root__: List[UsersPostRequestItem]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class PetForm(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
+
+
+class PetsGetResponse(BaseModel):
+    __root__: List[Pet]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/body_and_parameters/only_schemas.py 0.45.0-1/tests/data/expected/main/openapi/body_and_parameters/only_schemas.py
--- 0.26.4-3/tests/data/expected/main/openapi/body_and_parameters/only_schemas.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/body_and_parameters/only_schemas.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  body_and_parameters.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class PetForm(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/body_and_parameters/remote_ref.py 0.45.0-1/tests/data/expected/main/openapi/body_and_parameters/remote_ref.py
--- 0.26.4-3/tests/data/expected/main/openapi/body_and_parameters/remote_ref.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/body_and_parameters/remote_ref.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  body_and_parameters_remote_ref.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class PetForm(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
+
+
+class PetsGetResponse(BaseModel):
+    __root__: List[Pet]
+
+
+class FoodFoodIdGetResponse(BaseModel):
+    __root__: List[int]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/byte_format.py 0.45.0-1/tests/data/expected/main/openapi/byte_format.py
--- 0.26.4-3/tests/data/expected/main/openapi/byte_format.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/byte_format.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  byte_format.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import Base64Str, BaseModel, Field, RootModel
+
+
+class Data(RootModel[Base64Str]):
+    root: Base64Str = Field(..., description='The version of this API')
+
+
+class Api(BaseModel):
+    data: Data
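`Base64Str` (pydantic v2) decodes base64 input during validation and re-encodes on serialization, so the generated `Data` root model round-trips raw strings. A small sketch of that round trip, per my reading of pydantic's documented behavior:

from pydantic import Base64Str, BaseModel, Field, RootModel

class Data(RootModel[Base64Str]):
    root: Base64Str = Field(..., description='The version of this API')

class Api(BaseModel):
    data: Data

api = Api.model_validate({'data': 'aGVsbG8='})
assert api.data.root == 'hello'                   # decoded on validation
assert api.model_dump() == {'data': 'aGVsbG8='}   # re-encoded on dump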
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_acyclic/__init__.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_acyclic/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_acyclic/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_acyclic/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_acyclic.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class RootModel(BaseModel):
+    id: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_acyclic/sub.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_acyclic/sub.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_acyclic/sub.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_acyclic/sub.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_acyclic.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from . import RootModel
+
+
+class ChildModel(BaseModel):
+    id: Optional[str] = None
+    parent: Optional[RootModel] = None
+
+
+class AnotherChild(BaseModel):
+    sibling: Optional[ChildModel] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_class_conflict/__init__.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_class_conflict/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_class_conflict/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_class_conflict/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_class_conflict.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from ._internal import Item
+
+__all__ = ["Item"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_class_conflict/_internal.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_class_conflict/_internal.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_class_conflict/_internal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_class_conflict/_internal.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,49 @@
+# generated by datamodel-codegen:
+#   filename:  _internal
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Item(BaseModel):
+    id: Optional[str] = None
+    sub_item: Optional[Authorization] = None
+
+
+class Item_1(BaseModel):
+    id: Optional[str] = None
+
+
+class Invoice(BaseModel):
+    id: Optional[str] = None
+    billing_item: Optional[Item_1] = None
+    session: Optional[Session] = None
+
+
+class Item_2(BaseModel):
+    id: Optional[str] = None
+
+
+class Session(BaseModel):
+    id: Optional[str] = None
+    checkout_item: Optional[Item_2] = None
+    root_item: Optional[Item] = None
+
+
+class Item_3(BaseModel):
+    id: Optional[str] = None
+
+
+class Authorization(BaseModel):
+    id: Optional[str] = None
+    item: Optional[Item_3] = None
+    invoice: Optional[Invoice] = None
+
+
+Item.update_forward_refs()
+Invoice.update_forward_refs()
+Authorization.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_class_conflict/billing.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_class_conflict/billing.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_class_conflict/billing.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_class_conflict/billing.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,8 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_class_conflict.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from ._internal import Invoice
+from ._internal import Item_1 as Item
+
+__all__ = ["Invoice", "Item"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_class_conflict/checkout.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_class_conflict/checkout.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_class_conflict/checkout.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_class_conflict/checkout.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,8 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_class_conflict.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from ._internal import Item_2 as Item
+from ._internal import Session
+
+__all__ = ["Item", "Session"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_class_conflict/issuing.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_class_conflict/issuing.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_class_conflict/issuing.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_class_conflict/issuing.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,8 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_class_conflict.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from ._internal import Authorization
+from ._internal import Item_3 as Item
+
+__all__ = ["Authorization", "Item"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_different_prefixes/__init__.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_different_prefixes/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_different_prefixes/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_different_prefixes/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_different_prefixes.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from ._internal import Root
+
+__all__ = ["Root"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_different_prefixes/_internal.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_different_prefixes/_internal.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_different_prefixes/_internal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_different_prefixes/_internal.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,34 @@
+# generated by datamodel-codegen:
+#   filename:  _internal
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Root(BaseModel):
+    id: Optional[str] = None
+    alpha_ref: Optional[Model] = None
+
+
+class Model(BaseModel):
+    id: Optional[str] = None
+    beta_ref: Optional[Model_1] = None
+
+
+class Model_1(BaseModel):
+    id: Optional[str] = None
+    gamma_ref: Optional[Model_2] = None
+
+
+class Model_2(BaseModel):
+    id: Optional[str] = None
+    root_ref: Optional[Root] = None
+
+
+Root.update_forward_refs()
+Model.update_forward_refs()
+Model_1.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_different_prefixes/alpha/__init__.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_different_prefixes/alpha/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_different_prefixes/alpha/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_different_prefixes/alpha/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_different_prefixes.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_different_prefixes/alpha/sub.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_different_prefixes/alpha/sub.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_different_prefixes/alpha/sub.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_different_prefixes/alpha/sub.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_different_prefixes.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from .._internal import Model
+
+__all__ = ["Model"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_different_prefixes/beta/__init__.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_different_prefixes/beta/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_different_prefixes/beta/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_different_prefixes/beta/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_different_prefixes.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_different_prefixes/beta/sub.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_different_prefixes/beta/sub.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_different_prefixes/beta/sub.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_different_prefixes/beta/sub.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_different_prefixes.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from .._internal import Model_1 as Model
+
+__all__ = ["Model"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_different_prefixes/gamma/__init__.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_different_prefixes/gamma/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_different_prefixes/gamma/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_different_prefixes/gamma/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_different_prefixes.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_different_prefixes/gamma/sub.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_different_prefixes/gamma/sub.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_different_prefixes/gamma/sub.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_different_prefixes/gamma/sub.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_different_prefixes.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from .._internal import Model_2 as Model
+
+__all__ = ["Model"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/__init__.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_mixed_prefixes.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from ._internal import Root
+
+__all__ = ["Root"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/_internal.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/_internal.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/_internal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/_internal.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,40 @@
+# generated by datamodel-codegen:
+#   filename:  _internal
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Root(BaseModel):
+    id: Optional[str] = None
+    a_ref: Optional[Model] = None
+
+
+class Model(BaseModel):
+    id: Optional[str] = None
+    b_ref: Optional[Model_1] = None
+
+
+class Model_1(BaseModel):
+    id: Optional[str] = None
+    c_ref: Optional[Model_2] = None
+
+
+class Model_2(BaseModel):
+    id: Optional[str] = None
+    other_ref: Optional[Model_3] = None
+
+
+class Model_3(BaseModel):
+    id: Optional[str] = None
+    root_ref: Optional[Root] = None
+
+
+Root.update_forward_refs()
+Model.update_forward_refs()
+Model_1.update_forward_refs()
+Model_2.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/common/__init__.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/common/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/common/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/common/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_mixed_prefixes.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/common/a.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/common/a.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/common/a.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/common/a.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_mixed_prefixes.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from .._internal import Model
+
+__all__ = ["Model"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/common/b.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/common/b.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/common/b.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/common/b.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_mixed_prefixes.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from .._internal import Model_1 as Model
+
+__all__ = ["Model"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/common/c.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/common/c.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/common/c.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/common/c.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_mixed_prefixes.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from .._internal import Model_2 as Model
+
+__all__ = ["Model"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/other/__init__.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/other/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/other/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/other/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_mixed_prefixes.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/other/x.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/other/x.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/other/x.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_mixed_prefixes/other/x.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_mixed_prefixes.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from .._internal import Model_3 as Model
+
+__all__ = ["Model"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_small_cycle/__init__.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_small_cycle/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_small_cycle/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_small_cycle/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_small_cycle.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from ._internal import Parent
+
+__all__ = ["Parent"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_small_cycle/_internal.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_small_cycle/_internal.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_small_cycle/_internal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_small_cycle/_internal.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,22 @@
+# generated by datamodel-codegen:
+#   filename:  _internal
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Parent(BaseModel):
+    id: Optional[str] = None
+    child: Optional[Child] = None
+
+
+class Child(BaseModel):
+    id: Optional[str] = None
+    parent: Optional[Parent] = None
+
+
+Parent.update_forward_refs()
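The trailing `Parent.update_forward_refs()` is needed because, with `from __future__ import annotations`, `Child` is still just a string annotation when `Parent` is built; `Child` can resolve `Parent` immediately, which is why only the one call is emitted. A self-contained sketch of the same cycle (pydantic v1 API, matching the fixture):

from __future__ import annotations

from typing import Optional

from pydantic import BaseModel

class Parent(BaseModel):
    id: Optional[str] = None
    child: Optional[Child] = None  # 'Child' is only a forward reference here

class Child(BaseModel):
    id: Optional[str] = None
    parent: Optional[Parent] = None

Parent.update_forward_refs()  # resolves the dangling 'Child' reference

p = Parent(child=Child())
assert p.child.parent is None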
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_small_cycle/sub.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_small_cycle/sub.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_small_cycle/sub.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_small_cycle/sub.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_small_cycle.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from ._internal import Child
+
+__all__ = ["Child"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_stripe_like/__init__.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_stripe_like/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_stripe_like/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_stripe_like/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_stripe_like.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from ._internal import BalanceTransaction
+
+__all__ = ["BalanceTransaction"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_stripe_like/_internal.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_stripe_like/_internal.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_stripe_like/_internal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_stripe_like/_internal.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,53 @@
+# generated by datamodel-codegen:
+#   filename:  _internal
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class BalanceTransaction(BaseModel):
+    id: Optional[str] = None
+    amount: Optional[int] = None
+    source: Optional[Authorization] = None
+
+
+class Subscription(BaseModel):
+    id: Optional[str] = None
+    plan: Optional[str] = None
+
+
+class Invoice(BaseModel):
+    id: Optional[str] = None
+    session: Optional[Session] = None
+    subscription: Optional[Subscription] = None
+
+
+class LineItem(BaseModel):
+    id: Optional[str] = None
+    price: Optional[int] = None
+
+
+class Session(BaseModel):
+    id: Optional[str] = None
+    transaction: Optional[BalanceTransaction] = None
+    line_items: Optional[List[LineItem]] = None
+
+
+class Cardholder(BaseModel):
+    id: Optional[str] = None
+    name: Optional[str] = None
+
+
+class Authorization(BaseModel):
+    id: Optional[str] = None
+    invoice: Optional[Invoice] = None
+    cardholder: Optional[Cardholder] = None
+
+
+BalanceTransaction.update_forward_refs()
+Invoice.update_forward_refs()
+Authorization.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_stripe_like/billing.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_stripe_like/billing.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_stripe_like/billing.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_stripe_like/billing.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_stripe_like.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from ._internal import Invoice, Subscription
+
+__all__ = ["Invoice", "Subscription"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_stripe_like/checkout.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_stripe_like/checkout.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_stripe_like/checkout.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_stripe_like/checkout.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_stripe_like.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from ._internal import LineItem, Session
+
+__all__ = ["LineItem", "Session"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_stripe_like/issuing.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_stripe_like/issuing.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_stripe_like/issuing.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_stripe_like/issuing.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_stripe_like.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from ._internal import Authorization, Cardholder
+
+__all__ = ["Authorization", "Cardholder"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_with_inheritance/__init__.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_with_inheritance/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_with_inheritance/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_with_inheritance/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_with_inheritance.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from ._internal import BaseEntity, RootModel
+
+__all__ = ["BaseEntity", "RootModel"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_with_inheritance/_internal.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_with_inheritance/_internal.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_with_inheritance/_internal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_with_inheritance/_internal.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,39 @@
+# generated by datamodel-codegen:
+#   filename:  _internal
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class BaseEntity(BaseModel):
+    id: Optional[str] = None
+    created_at: Optional[str] = None
+
+
+class RootModel(BaseModel):
+    id: Optional[str] = None
+    auth: Optional[Authorization] = None
+
+
+class Invoice(BaseEntity):
+    total: Optional[int] = None
+    session: Optional[Session] = None
+
+
+class Session(BaseEntity):
+    status: Optional[str] = None
+    root_ref: Optional[RootModel] = None
+
+
+class Authorization(BaseEntity):
+    amount: Optional[int] = None
+    invoice: Optional[Invoice] = None
+
+
+RootModel.update_forward_refs()
+Invoice.update_forward_refs()
+Authorization.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_with_inheritance/billing.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_with_inheritance/billing.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_with_inheritance/billing.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_with_inheritance/billing.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_with_inheritance.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from ._internal import Invoice
+
+__all__ = ["Invoice"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_with_inheritance/checkout.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_with_inheritance/checkout.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_with_inheritance/checkout.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_with_inheritance/checkout.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_with_inheritance.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from ._internal import Session
+
+__all__ = ["Session"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/circular_imports_with_inheritance/issuing.py 0.45.0-1/tests/data/expected/main/openapi/circular_imports_with_inheritance/issuing.py
--- 0.26.4-3/tests/data/expected/main/openapi/circular_imports_with_inheritance/issuing.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/circular_imports_with_inheritance/issuing.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  circular_imports_with_inheritance.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from ._internal import Authorization
+
+__all__ = ["Authorization"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/collapse_root_models.py 0.45.0-1/tests/data/expected/main/openapi/collapse_root_models.py
--- 0.26.4-3/tests/data/expected/main/openapi/collapse_root_models.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/collapse_root_models.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  not_real_string.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field, constr
+
+
+class Tweet(BaseModel):
+    author_id: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[str]
+
+
+class FileRequest(BaseModel):
+    file_hash: constr(regex=r'^[a-fA-F\d]{32}$', min_length=32, max_length=32) = Field(
+        ..., description='For file'
+    )
+
+
+class ImageRequest(BaseModel):
+    image_hash: Optional[
+        constr(regex=r'^[a-fA-F\d]{32}$', min_length=64, max_length=64)
+    ] = Field(None, description='For image')
+
+
+class FileHashes(BaseModel):
+    __root__: List[constr(regex=r'^[a-fA-F\d]{32}$', min_length=32, max_length=32)]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/collapse_root_models_field_constraints.py 0.45.0-1/tests/data/expected/main/openapi/collapse_root_models_field_constraints.py
--- 0.26.4-3/tests/data/expected/main/openapi/collapse_root_models_field_constraints.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/collapse_root_models_field_constraints.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,51 @@
+# generated by datamodel-codegen:
+#   filename:  not_real_string.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class Tweet(BaseModel):
+    author_id: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[str]
+
+
+class FileHash(BaseModel):
+    __root__: str = Field(
+        ...,
+        description='For file',
+        max_length=32,
+        min_length=32,
+        regex='^[a-fA-F\\d]{32}$',
+    )
+
+
+class FileRequest(BaseModel):
+    file_hash: str = Field(
+        ...,
+        description='For file',
+        max_length=32,
+        min_length=32,
+        regex='^[a-fA-F\\d]{32}$',
+    )
+
+
+class ImageRequest(BaseModel):
+    image_hash: Optional[str] = Field(
+        None,
+        description='For image',
+        max_length=64,
+        min_length=64,
+        regex='^[a-fA-F\\d]{32}$',
+    )
+
+
+class FileHashes(BaseModel):
+    __root__: List[FileHash]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/collapse_root_models_with_references_to_flat_types.py 0.45.0-1/tests/data/expected/main/openapi/collapse_root_models_with_references_to_flat_types.py
--- 0.26.4-3/tests/data/expected/main/openapi/collapse_root_models_with_references_to_flat_types.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/collapse_root_models_with_references_to_flat_types.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  flat_type.jsonschema
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class FooModel(BaseModel):
+    foo: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/complex_reference.py 0.45.0-1/tests/data/expected/main/openapi/complex_reference.py
--- 0.26.4-3/tests/data/expected/main/openapi/complex_reference.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/complex_reference.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,41 @@
+# generated by datamodel-codegen:
+#   filename:  complex_reference.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class A(BaseModel):
+    a_property: Optional[B1] = None
+
+
+class B1(A):
+    pass
+
+
+class B2(A):
+    pass
+
+
+class C1(B1):
+    pass
+
+
+class D1(C1):
+    pass
+
+
+class D1andB2(D1, B2):
+    pass
+
+
+A.update_forward_refs()
+B1.update_forward_refs()
+B2.update_forward_refs()
+C1.update_forward_refs()
+D1.update_forward_refs()
+D1andB2.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/const.py 0.45.0-1/tests/data/expected/main/openapi/const.py
--- 0.26.4-3/tests/data/expected/main/openapi/const.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/const.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  const.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class Namespace(BaseModel):
+    apiVersion: str = Field('v1', const=True)
+    kind: str = Field('Namespace', const=True)
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/const_baseline.py 0.45.0-1/tests/data/expected/main/openapi/const_baseline.py
--- 0.26.4-3/tests/data/expected/main/openapi/const_baseline.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/const_baseline.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  const.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class ApiVersion(BaseModel):
+    __root__: str = Field('v1', const=True, description='The version of this API')
+
+
+class Api(BaseModel):
+    version: ApiVersion
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/const_field.py 0.45.0-1/tests/data/expected/main/openapi/const_field.py
--- 0.26.4-3/tests/data/expected/main/openapi/const_field.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/const_field.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  const.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class Api(BaseModel):
+    version: str = Field('v1', const=True, description='The version of this API')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/const_field_dataclass.py 0.45.0-1/tests/data/expected/main/openapi/const_field_dataclass.py
--- 0.26.4-3/tests/data/expected/main/openapi/const_field_dataclass.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/const_field_dataclass.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  const.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Literal
+
+
+@dataclass
+class Api:
+    version: Literal['v1']
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/const_field_msgspec.py 0.45.0-1/tests/data/expected/main/openapi/const_field_msgspec.py
--- 0.26.4-3/tests/data/expected/main/openapi/const_field_msgspec.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/const_field_msgspec.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  const.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, Literal
+
+from msgspec import Meta, Struct
+
+
+class Api(Struct):
+    version: Annotated[Literal['v1'], Meta(description='The version of this API')]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/const_field_pydantic_v2.py 0.45.0-1/tests/data/expected/main/openapi/const_field_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/const_field_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/const_field_pydantic_v2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  const.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal
+
+from pydantic import BaseModel, Field
+
+
+class Api(BaseModel):
+    version: Literal['v1'] = Field(..., description='The version of this API')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/const_field_typed_dict.py 0.45.0-1/tests/data/expected/main/openapi/const_field_typed_dict.py
--- 0.26.4-3/tests/data/expected/main/openapi/const_field_typed_dict.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/const_field_typed_dict.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  const.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, TypedDict
+
+
+class Api(TypedDict):
+    version: Literal['v1']
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/const_pydantic_v2.py 0.45.0-1/tests/data/expected/main/openapi/const_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/const_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/const_pydantic_v2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  const.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal
+
+from pydantic import BaseModel
+
+
+class Namespace(BaseModel):
+    apiVersion: Literal['v1']
+    kind: Literal['Namespace']
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/content_in_parameters.py 0.45.0-1/tests/data/expected/main/openapi/content_in_parameters.py
--- 0.26.4-3/tests/data/expected/main/openapi/content_in_parameters.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/content_in_parameters.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  content_in_parameters.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class PetForm(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/custom_file_header.py 0.45.0-1/tests/data/expected/main/openapi/custom_file_header.py
--- 0.26.4-3/tests/data/expected/main/openapi/custom_file_header.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/custom_file_header.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,69 @@
+# multiline custom ;
+# header ;
+# file ;
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/custom_file_header_comments_only.py 0.45.0-1/tests/data/expected/main/openapi/custom_file_header_comments_only.py
--- 0.26.4-3/tests/data/expected/main/openapi/custom_file_header_comments_only.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/custom_file_header_comments_only.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,68 @@
+# Just a comment
+# Another comment
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/custom_file_header_empty.py 0.45.0-1/tests/data/expected/main/openapi/custom_file_header_empty.py
--- 0.26.4-3/tests/data/expected/main/openapi/custom_file_header_empty.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/custom_file_header_empty.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/custom_file_header_invalid_syntax.py 0.45.0-1/tests/data/expected/main/openapi/custom_file_header_invalid_syntax.py
--- 0.26.4-3/tests/data/expected/main/openapi/custom_file_header_invalid_syntax.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/custom_file_header_invalid_syntax.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,69 @@
+
+from __future__ import annotations
+
+# Valid comment
+def incomplete(
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/custom_file_header_no_future.py 0.45.0-1/tests/data/expected/main/openapi/custom_file_header_no_future.py
--- 0.26.4-3/tests/data/expected/main/openapi/custom_file_header_no_future.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/custom_file_header_no_future.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,68 @@
+# multiline custom ;
+# header ;
+# file ;
+
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/custom_file_header_with_docstring.py 0.45.0-1/tests/data/expected/main/openapi/custom_file_header_with_docstring.py
--- 0.26.4-3/tests/data/expected/main/openapi/custom_file_header_with_docstring.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/custom_file_header_with_docstring.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,70 @@
+"""Custom module docstring.
+
+This module contains generated models.
+"""
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/custom_file_header_with_docstring_and_import.py 0.45.0-1/tests/data/expected/main/openapi/custom_file_header_with_docstring_and_import.py
--- 0.26.4-3/tests/data/expected/main/openapi/custom_file_header_with_docstring_and_import.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/custom_file_header_with_docstring_and_import.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,74 @@
+"""Custom module docstring.
+
+This module contains generated models.
+"""
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from collections.abc import Sequence
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/custom_file_header_with_import.py 0.45.0-1/tests/data/expected/main/openapi/custom_file_header_with_import.py
--- 0.26.4-3/tests/data/expected/main/openapi/custom_file_header_with_import.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/custom_file_header_with_import.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,71 @@
+# Custom header with import
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+    from collections.abc import Sequence
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/custom_id_pydantic_v2.py 0.45.0-1/tests/data/expected/main/openapi/custom_id_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/custom_id_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/custom_id_pydantic_v2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  custom_id.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+from uuid import UUID
+
+from pydantic import BaseModel, Field, RootModel
+
+
+class CustomId(RootModel[UUID]):
+    root: UUID = Field(..., description='My custom ID')
+
+
+class Model(BaseModel):
+    custom_id: Optional[CustomId] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/custom_id_pydantic_v2_custom_base.py 0.45.0-1/tests/data/expected/main/openapi/custom_id_pydantic_v2_custom_base.py
--- 0.26.4-3/tests/data/expected/main/openapi/custom_id_pydantic_v2_custom_base.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/custom_id_pydantic_v2_custom_base.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  custom_id.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+from uuid import UUID
+
+from pydantic import Field, RootModel
+
+from custom_base import Base
+
+
+class CustomId(RootModel[UUID]):
+    root: UUID = Field(..., description='My custom ID')
+
+
+class Model(Base):
+    custom_id: Optional[CustomId] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/custom_template_dir.py 0.45.0-1/tests/data/expected/main/openapi/custom_template_dir.py
--- 0.26.4-3/tests/data/expected/main/openapi/custom_template_dir.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/custom_template_dir.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,61 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):  # 1 2, 1 2, this is just a pet
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = None
+    apiVersionNumber: Optional[str] = None
+    apiUrl: Optional[AnyUrl] = None
+    apiDocumentationUrl: Optional[AnyUrl] = None
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/dataclass.py 0.45.0-1/tests/data/expected/main/openapi/dataclass.py
--- 0.26.4-3/tests/data/expected/main/openapi/dataclass.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/dataclass.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,63 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import List, Optional
+
+from typing_extensions import TypeAlias
+
+
+@dataclass
+class Pet:
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+Pets: TypeAlias = List[Pet]
+
+
+@dataclass
+class User:
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+Users: TypeAlias = List[User]
+
+
+Id: TypeAlias = str
+
+
+Rules: TypeAlias = List[str]
+
+
+@dataclass
+class Error:
+    code: int
+    message: str
+
+
+@dataclass
+class Api:
+    apiKey: Optional[str] = None
+    apiVersionNumber: Optional[str] = None
+    apiUrl: Optional[str] = None
+    apiDocumentationUrl: Optional[str] = None
+
+
+Apis: TypeAlias = List[Api]
+
+
+@dataclass
+class Event:
+    name: Optional[str] = None
+
+
+@dataclass
+class Result:
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/dataclass_base_class.py 0.45.0-1/tests/data/expected/main/openapi/dataclass_base_class.py
--- 0.26.4-3/tests/data/expected/main/openapi/dataclass_base_class.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/dataclass_base_class.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,64 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import List, Optional
+
+from custom_base import Base
+from typing_extensions import TypeAlias
+
+
+@dataclass
+class Pet(Base):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+Pets: TypeAlias = List[Pet]
+
+
+@dataclass
+class User(Base):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+Users: TypeAlias = List[User]
+
+
+Id: TypeAlias = str
+
+
+Rules: TypeAlias = List[str]
+
+
+@dataclass
+class Error(Base):
+    code: int
+    message: str
+
+
+@dataclass
+class Api(Base):
+    apiKey: Optional[str] = None
+    apiVersionNumber: Optional[str] = None
+    apiUrl: Optional[str] = None
+    apiDocumentationUrl: Optional[str] = None
+
+
+Apis: TypeAlias = List[Api]
+
+
+@dataclass
+class Event(Base):
+    name: Optional[str] = None
+
+
+@dataclass
+class Result(Base):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/dataclass_inheritance_field_ordering_py310.py 0.45.0-1/tests/data/expected/main/openapi/dataclass_inheritance_field_ordering_py310.py
--- 0.26.4-3/tests/data/expected/main/openapi/dataclass_inheritance_field_ordering_py310.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/dataclass_inheritance_field_ordering_py310.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  dataclass_inheritance_field_ordering.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from typing import Optional
+
+
+@dataclass
+class ParentWithDefault:
+    name: Optional[str] = 'default_name'
+    read_only_field: Optional[str] = None
+
+
+@dataclass
+class ChildWithRequired(ParentWithDefault):
+    child_id: str = field(kw_only=True)
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/dataclass_keyword_only.py 0.45.0-1/tests/data/expected/main/openapi/dataclass_keyword_only.py
--- 0.26.4-3/tests/data/expected/main/openapi/dataclass_keyword_only.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/dataclass_keyword_only.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  inheritance.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Optional
+
+
+@dataclass(kw_only=True)
+class Base:
+    id: str
+    createdAt: Optional[str] = None
+    version: Optional[float] = 1
+
+
+@dataclass(kw_only=True)
+class Child(Base):
+    title: str
+    url: Optional[str] = 'https://example.com'
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/datetime.py 0.45.0-1/tests/data/expected/main/openapi/datetime.py
--- 0.26.4-3/tests/data/expected/main/openapi/datetime.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/datetime.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  datetime.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from datetime import datetime
+
+from pydantic import BaseModel, Field
+
+
+class InventoryItem(BaseModel):
+    releaseDate: datetime = Field(..., example='2016-08-29T09:12:33.001Z')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/datetime_dataclass.py 0.45.0-1/tests/data/expected/main/openapi/datetime_dataclass.py
--- 0.26.4-3/tests/data/expected/main/openapi/datetime_dataclass.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/datetime_dataclass.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  datetime.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from datetime import datetime
+
+
+@dataclass
+class InventoryItem:
+    releaseDate: datetime
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/datetime_msgspec.py 0.45.0-1/tests/data/expected/main/openapi/datetime_msgspec.py
--- 0.26.4-3/tests/data/expected/main/openapi/datetime_msgspec.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/datetime_msgspec.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  datetime.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from datetime import datetime
+
+from msgspec import Struct
+
+
+class InventoryItem(Struct):
+    releaseDate: datetime
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/datetime_pydantic_v2.py 0.45.0-1/tests/data/expected/main/openapi/datetime_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/datetime_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/datetime_pydantic_v2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  datetime.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import AwareDatetime, BaseModel, Field
+
+
+class InventoryItem(BaseModel):
+    releaseDate: AwareDatetime = Field(..., examples=['2016-08-29T09:12:33.001Z'])
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/datetime_pydantic_v2_datetime.py 0.45.0-1/tests/data/expected/main/openapi/datetime_pydantic_v2_datetime.py
--- 0.26.4-3/tests/data/expected/main/openapi/datetime_pydantic_v2_datetime.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/datetime_pydantic_v2_datetime.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  datetime.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from datetime import datetime
+
+from pydantic import BaseModel, Field
+
+
+class InventoryItem(BaseModel):
+    releaseDate: datetime = Field(..., examples=['2016-08-29T09:12:33.001Z'])
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/default_object/Another.py 0.45.0-1/tests/data/expected/main/openapi/default_object/Another.py
--- 0.26.4-3/tests/data/expected/main/openapi/default_object/Another.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/default_object/Another.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  default_object.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+from . import Foo as Foo_1
+from . import Nested
+
+
+class Foo(BaseModel):
+    __root__: str
+
+
+class Bar(BaseModel):
+    original_foo: Optional[Foo_1] = Field(
+        default_factory=lambda: Foo_1.parse_obj({'text': 'abc', 'number': 123})
+    )
+    nested_foo: Optional[List[Nested.Foo]] = Field(
+        default_factory=lambda: [Nested.Foo.parse_obj(v) for v in ['abc', 'efg']]
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/default_object/Nested.py 0.45.0-1/tests/data/expected/main/openapi/default_object/Nested.py
--- 0.26.4-3/tests/data/expected/main/openapi/default_object/Nested.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/default_object/Nested.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,30 @@
+# generated by datamodel-codegen:
+#   filename:  default_object.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+from . import Foo as Foo_1
+
+
+class Foo(BaseModel):
+    __root__: str
+
+
+class Bar(BaseModel):
+    foo: Optional[Foo_1] = Field(
+        default_factory=lambda: Foo_1.parse_obj({'text': 'abc', 'number': 123})
+    )
+    baz: Optional[List[Foo_1]] = Field(
+        default_factory=lambda: [
+            Foo_1.parse_obj(v)
+            for v in [{'text': 'abc', 'number': 123}, {'text': 'efg', 'number': 456}]
+        ]
+    )
+    nested_foo: Optional[Foo] = Field(
+        default_factory=lambda: Foo.parse_obj('default foo')
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/default_object/__init__.py 0.45.0-1/tests/data/expected/main/openapi/default_object/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/default_object/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/default_object/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,26 @@
+# generated by datamodel-codegen:
+#   filename:  default_object.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class Foo(BaseModel):
+    text: Optional[str] = '987'
+    number: Optional[float] = None
+
+
+class Bar(BaseModel):
+    foo: Optional[Foo] = Field(
+        default_factory=lambda: Foo.parse_obj({'text': 'abc', 'number': 123})
+    )
+    baz: Optional[List[Foo]] = Field(
+        default_factory=lambda: [
+            Foo.parse_obj(v)
+            for v in [{'text': 'abc', 'number': 123}, {'text': 'efg', 'number': 456}]
+        ]
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/disable_appending_item_suffix.py 0.45.0-1/tests/data/expected/main/openapi/disable_appending_item_suffix.py
--- 0.26.4-3/tests/data/expected/main/openapi/disable_appending_item_suffix.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/disable_appending_item_suffix.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,88 @@
+# generated by datamodel-codegen:
+#   filename:  api_constrained.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional, Union
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int = Field(..., ge=0, le=9223372036854775807)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+
+
+class Pets(BaseModel):
+    __root__: List[Pet] = Field(..., max_items=10, min_items=1, unique_items=True)
+
+
+class UID(BaseModel):
+    __root__: int = Field(..., ge=0)
+
+
+class Phone(BaseModel):
+    __root__: str = Field(..., min_length=3)
+
+
+class Fax(BaseModel):
+    __root__: str = Field(..., min_length=3)
+
+
+class User(BaseModel):
+    id: int = Field(..., ge=0)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+    uid: UID
+    phones: Optional[List[Phone]] = Field(None, max_items=10)
+    fax: Optional[List[Fax]] = None
+    height: Optional[Union[int, float]] = Field(None, ge=1.0, le=300.0)
+    weight: Optional[Union[float, int]] = Field(None, ge=1.0, le=1000.0)
+    age: Optional[int] = Field(None, gt=0, le=200)
+    rating: Optional[float] = Field(None, gt=0.0, le=5.0)
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/disable_timestamp.py 0.45.0-1/tests/data/expected/main/openapi/disable_timestamp.py
--- 0.26.4-3/tests/data/expected/main/openapi/disable_timestamp.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/disable_timestamp.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,68 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/discriminator/allof.py 0.45.0-1/tests/data/expected/main/openapi/discriminator/allof.py
--- 0.26.4-3/tests/data/expected/main/openapi/discriminator/allof.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/discriminator/allof.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_allof.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, Literal
+
+from pydantic import BaseModel, Field
+
+
+class Pet(BaseModel):
+    pet_type: Annotated[str, Field(alias='petType')]
+
+
+class Cat(Pet):
+    name: str | None = None
+    pet_type: Literal['cat'] = Field(..., alias='petType')
+
+
+class Dog(Pet):
+    bark: str | None = None
+    pet_type: Literal['dog'] = Field(..., alias='petType')
+
+
+class Lizard(Pet):
+    loves_rocks: Annotated[bool | None, Field(alias='lovesRocks')] = None
+    pet_type: Literal['lizard'] = Field(..., alias='petType')
+
+
+class PetContainer(BaseModel):
+    pet: Annotated[Cat | Dog | Lizard, Field(discriminator='pet_type')]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/discriminator/allof_no_subtypes.py 0.45.0-1/tests/data/expected/main/openapi/discriminator/allof_no_subtypes.py
--- 0.26.4-3/tests/data/expected/main/openapi/discriminator/allof_no_subtypes.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/discriminator/allof_no_subtypes.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_allof_no_subtypes.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class BaseItem(BaseModel):
+    itemType: str
+
+
+class FooItem(BaseModel):
+    fooValue: Optional[str] = None
+
+
+class BarItem(BaseModel):
+    barValue: Optional[int] = None
+
+
+class ItemContainer(BaseModel):
+    item: BaseItem = Field(..., discriminator='itemType')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/discriminator/dataclass_enum_one_literal_as_default.py 0.45.0-1/tests/data/expected/main/openapi/discriminator/dataclass_enum_one_literal_as_default.py
--- 0.26.4-3/tests/data/expected/main/openapi/discriminator/dataclass_enum_one_literal_as_default.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/discriminator/dataclass_enum_one_literal_as_default.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,35 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_enum.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from enum import Enum
+from typing import Literal, Union
+
+from typing_extensions import TypeAlias
+
+
+class RequestVersionEnum(Enum):
+    v1 = 'v1'
+    v2 = 'v2'
+
+
+@dataclass
+class RequestBase:
+    version: RequestVersionEnum
+
+
+@dataclass
+class RequestV1(RequestBase):
+    request_id: str
+    version: Literal['v1'] = 'v1'
+
+
+@dataclass
+class RequestV2(RequestBase):
+    version: Literal['v2'] = 'v2'
+
+
+Request: TypeAlias = Union[RequestV1, RequestV2]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/discriminator/dataclass_enum_one_literal_as_default_py310.py 0.45.0-1/tests/data/expected/main/openapi/discriminator/dataclass_enum_one_literal_as_default_py310.py
--- 0.26.4-3/tests/data/expected/main/openapi/discriminator/dataclass_enum_one_literal_as_default_py310.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/discriminator/dataclass_enum_one_literal_as_default_py310.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_enum.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from enum import Enum
+from typing import Literal, TypeAlias, Union
+
+
+class RequestVersionEnum(Enum):
+    v1 = 'v1'
+    v2 = 'v2'
+
+
+@dataclass
+class RequestBase:
+    version: RequestVersionEnum
+
+
+@dataclass
+class RequestV1(RequestBase):
+    request_id: str = field(kw_only=True)
+    version: Literal['v1'] = 'v1'
+
+
+@dataclass
+class RequestV2(RequestBase):
+    version: Literal['v2'] = 'v2'
+
+
+Request: TypeAlias = Union[RequestV1, RequestV2]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/discriminator/enum.py 0.45.0-1/tests/data/expected/main/openapi/discriminator/enum.py
--- 0.26.4-3/tests/data/expected/main/openapi/discriminator/enum.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/discriminator/enum.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_enum.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Literal, Union
+
+from pydantic import BaseModel, Field, RootModel
+
+
+class RequestVersionEnum(Enum):
+    v1 = 'v1'
+    v2 = 'v2'
+
+
+class RequestBase(BaseModel):
+    version: RequestVersionEnum
+
+
+class RequestV1(RequestBase):
+    request_id: str = Field(..., description='there is description', title='test title')
+    version: Literal['v1']
+
+
+class RequestV2(RequestBase):
+    version: Literal['v2']
+
+
+class Request(RootModel[Union[RequestV1, RequestV2]]):
+    root: Union[RequestV1, RequestV2] = Field(..., discriminator='version')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/discriminator/enum_duplicate.py 0.45.0-1/tests/data/expected/main/openapi/discriminator/enum_duplicate.py
--- 0.26.4-3/tests/data/expected/main/openapi/discriminator/enum_duplicate.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/discriminator/enum_duplicate.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,37 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_enum_duplicate.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Cat(BaseModel):
+    pet_type: Literal['cat'] = Field(..., title='Pet Type')
+    meows: int = Field(..., title='Meows')
+
+
+class Dog(BaseModel):
+    pet_type: Literal['dog'] = Field(..., title='Pet Type')
+    barks: float = Field(..., title='Barks')
+
+
+class PetType(Enum):
+    reptile = 'reptile'
+    lizard = 'lizard'
+
+
+class Lizard(BaseModel):
+    pet_type: Literal['lizard', 'reptile'] = Field(..., title='Pet Type')
+    scales: bool = Field(..., title='Scales')
+
+
+class Animal(BaseModel):
+    pet: Optional[Union[Cat, Dog, Lizard]] = Field(
+        None, discriminator='pet_type', title='Pet'
+    )
+    n: Optional[int] = Field(None, title='N')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/discriminator/enum_one_literal_as_default.py 0.45.0-1/tests/data/expected/main/openapi/discriminator/enum_one_literal_as_default.py
--- 0.26.4-3/tests/data/expected/main/openapi/discriminator/enum_one_literal_as_default.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/discriminator/enum_one_literal_as_default.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_enum.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Literal, Union
+
+from pydantic import BaseModel, Field, RootModel
+
+
+class RequestVersionEnum(Enum):
+    v1 = 'v1'
+    v2 = 'v2'
+
+
+class RequestBase(BaseModel):
+    version: RequestVersionEnum
+
+
+class RequestV1(RequestBase):
+    request_id: str = Field(..., description='there is description', title='test title')
+    version: Literal['v1'] = 'v1'
+
+
+class RequestV2(RequestBase):
+    version: Literal['v2'] = 'v2'
+
+
+class Request(RootModel[Union[RequestV1, RequestV2]]):
+    root: Union[RequestV1, RequestV2] = Field(..., discriminator='version')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/discriminator/enum_single_value.py 0.45.0-1/tests/data/expected/main/openapi/discriminator/enum_single_value.py
--- 0.26.4-3/tests/data/expected/main/openapi/discriminator/enum_single_value.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/discriminator/enum_single_value.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_enum_single_value.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+from pydantic import BaseModel, Field, RootModel
+
+
+class ToolType(Enum):
+    function = 'function'
+
+
+class ToolBase(BaseModel):
+    type: ToolType
+
+
+class FunctionToolCall(ToolBase):
+    id: str
+
+
+class ToolCall(RootModel[FunctionToolCall]):
+    root: FunctionToolCall = Field(..., discriminator='type')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/discriminator/enum_single_value_anyof.py 0.45.0-1/tests/data/expected/main/openapi/discriminator/enum_single_value_anyof.py
--- 0.26.4-3/tests/data/expected/main/openapi/discriminator/enum_single_value_anyof.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/discriminator/enum_single_value_anyof.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,27 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_enum_single_value_anyof.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Literal, Union
+
+from pydantic import BaseModel, Field, RootModel
+
+
+class ToolType(Enum):
+    function = 'function'
+
+
+class FunctionToolCall(BaseModel):
+    id: str
+    type: Literal['function']
+
+
+class CustomToolCall(BaseModel):
+    type: Literal['CustomToolCall']
+
+
+class ToolCallUnion(RootModel[Union[FunctionToolCall, CustomToolCall]]):
+    root: Union[FunctionToolCall, CustomToolCall] = Field(..., discriminator='type')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/discriminator/enum_single_value_anyof_use_enum.py 0.45.0-1/tests/data/expected/main/openapi/discriminator/enum_single_value_anyof_use_enum.py
--- 0.26.4-3/tests/data/expected/main/openapi/discriminator/enum_single_value_anyof_use_enum.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/discriminator/enum_single_value_anyof_use_enum.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,27 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_enum_single_value_anyof.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Literal, Union
+
+from pydantic import BaseModel, Field, RootModel
+
+
+class ToolType(Enum):
+    function = 'function'
+
+
+class FunctionToolCall(BaseModel):
+    id: str
+    type: Literal[ToolType.function]
+
+
+class CustomToolCall(BaseModel):
+    type: Literal['CustomToolCall']
+
+
+class ToolCallUnion(RootModel[Union[FunctionToolCall, CustomToolCall]]):
+    root: Union[FunctionToolCall, CustomToolCall] = Field(..., discriminator='type')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/discriminator/enum_single_value_use_enum.py 0.45.0-1/tests/data/expected/main/openapi/discriminator/enum_single_value_use_enum.py
--- 0.26.4-3/tests/data/expected/main/openapi/discriminator/enum_single_value_use_enum.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/discriminator/enum_single_value_use_enum.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_enum_single_value.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+from pydantic import BaseModel, Field, RootModel
+
+
+class ToolType(Enum):
+    function = 'function'
+
+
+class ToolBase(BaseModel):
+    type: ToolType
+
+
+class FunctionToolCall(ToolBase):
+    id: str
+
+
+class ToolCall(RootModel[FunctionToolCall]):
+    root: FunctionToolCall = Field(..., discriminator='type')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/discriminator/enum_use_enum_values.py 0.45.0-1/tests/data/expected/main/openapi/discriminator/enum_use_enum_values.py
--- 0.26.4-3/tests/data/expected/main/openapi/discriminator/enum_use_enum_values.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/discriminator/enum_use_enum_values.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_enum.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Literal, Union
+
+from pydantic import BaseModel, Field, RootModel
+
+
+class RequestVersionEnum(Enum):
+    v1 = 'v1'
+    v2 = 'v2'
+
+
+class RequestBase(BaseModel):
+    version: RequestVersionEnum
+
+
+class RequestV1(RequestBase):
+    request_id: str = Field(..., description='there is description', title='test title')
+    version: Literal[RequestVersionEnum.v1]
+
+
+class RequestV2(RequestBase):
+    version: Literal[RequestVersionEnum.v2]
+
+
+class Request(RootModel[Union[RequestV1, RequestV2]]):
+    root: Union[RequestV1, RequestV2] = Field(..., discriminator='version')
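
In this variant the discriminator literals carry enum members rather than raw strings; pydantic v2 should still accept the plain string tag, since `Literal[RequestVersionEnum.v1]` resolves to the member's value. A sketch under that assumption (the module name `request_models` is hypothetical):

    from request_models import Request  # hypothetical module for the file above

    req = Request.model_validate({'version': 'v1', 'request_id': 'x'})
    assert type(req.root).__name__ == 'RequestV1'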
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/discriminator/enum_use_enum_values_sanitized.py 0.45.0-1/tests/data/expected/main/openapi/discriminator/enum_use_enum_values_sanitized.py
--- 0.26.4-3/tests/data/expected/main/openapi/discriminator/enum_use_enum_values_sanitized.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/discriminator/enum_use_enum_values_sanitized.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_enum_sanitized.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Literal, Union
+
+from pydantic import BaseModel, Field, RootModel
+
+
+class RequestVersionEnum(Enum):
+    v1_beta = 'v1-beta'
+    v2 = 'v2'
+
+
+class RequestBase(BaseModel):
+    version: RequestVersionEnum
+
+
+class RequestV1(RequestBase):
+    request_id: str
+    version: Literal[RequestVersionEnum.v1_beta]
+
+
+class RequestV2(RequestBase):
+    version: Literal[RequestVersionEnum.v2]
+
+
+class Request(RootModel[Union[RequestV1, RequestV2]]):
+    root: Union[RequestV1, RequestV2] = Field(..., discriminator='version')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/discriminator/general.py 0.45.0-1/tests/data/expected/main/openapi/discriminator/general.py
--- 0.26.4-3/tests/data/expected/main/openapi/discriminator/general.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/discriminator/general.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,36 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Type(Enum):
+    my_first_object = 'my_first_object'
+    my_second_object = 'my_second_object'
+    my_third_object = 'my_third_object'
+
+
+class ObjectBase(BaseModel):
+    name: Optional[str] = Field(None, description='Name of the object')
+    type: Literal['type1'] = Field(..., description='Object type')
+
+
+class CreateObjectRequest(ObjectBase):
+    name: str = Field(..., description='Name of the object')
+    type: Literal['type2'] = Field(..., description='Object type')
+
+
+class UpdateObjectRequest(ObjectBase):
+    type: Literal['type3']
+
+
+class Demo(BaseModel):
+    __root__: Union[ObjectBase, CreateObjectRequest, UpdateObjectRequest] = Field(
+        ..., discriminator='type'
+    )
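
This fixture targets pydantic v1, where the custom root type is spelled `__root__` and parsing goes through `parse_obj`. A quick sketch of the dispatch (the module name `demo_models` is hypothetical):

    from demo_models import Demo  # hypothetical module for the file above

    obj = Demo.parse_obj({'name': 'thing', 'type': 'type2'})
    assert type(obj.__root__).__name__ == 'CreateObjectRequest'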
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/discriminator/in_array.py 0.45.0-1/tests/data/expected/main/openapi/discriminator/in_array.py
--- 0.26.4-3/tests/data/expected/main/openapi/discriminator/in_array.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/discriminator/in_array.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,39 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_in_array.yaml
+#   timestamp: 2023-07-27T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Type(Enum):
+    my_first_object = 'my_first_object'
+    my_second_object = 'my_second_object'
+
+
+class ObjectBase(BaseModel):
+    name: Optional[str] = Field(None, description='Name of the object')
+    type: Literal['type1'] = Field(..., description='Object type')
+
+
+class CreateObjectRequest(ObjectBase):
+    name: str = Field(..., description='Name of the object')
+    type: Literal['type2'] = Field(..., description='Object type')
+
+
+class UpdateObjectRequest(ObjectBase):
+    type: Literal['type3']
+
+
+class MyArray(BaseModel):
+    __root__: Union[ObjectBase, CreateObjectRequest, UpdateObjectRequest] = Field(
+        ..., discriminator='type'
+    )
+
+
+class Demo(BaseModel):
+    myArray: List[MyArray]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/discriminator/in_array_collapse_root_models.py 0.45.0-1/tests/data/expected/main/openapi/discriminator/in_array_collapse_root_models.py
--- 0.26.4-3/tests/data/expected/main/openapi/discriminator/in_array_collapse_root_models.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/discriminator/in_array_collapse_root_models.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_in_array.yaml
+#   timestamp: 2023-07-27T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Type(Enum):
+    my_first_object = 'my_first_object'
+    my_second_object = 'my_second_object'
+
+
+class ObjectBase(BaseModel):
+    name: Optional[str] = Field(None, description='Name of the object')
+    type: Literal['type1'] = Field(..., description='Object type')
+
+
+class CreateObjectRequest(ObjectBase):
+    name: str = Field(..., description='Name of the object')
+    type: Literal['type2'] = Field(..., description='Object type')
+
+
+class UpdateObjectRequest(ObjectBase):
+    type: Literal['type3']
+
+
+class Demo(BaseModel):
+    myArray: List[Union[ObjectBase, CreateObjectRequest, UpdateObjectRequest]]
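
Compared with the previous fixture, the `MyArray` root model is collapsed away and `Demo.myArray` holds the union directly; the `discriminator` is dropped along with the root model, so pydantic v1 falls back to left-to-right union matching. A sketch under that assumption (the module name is hypothetical):

    from demo_models import Demo  # hypothetical module for the file above

    demo = Demo(myArray=[{'name': 'a', 'type': 'type2'}])
    # ObjectBase fails on Literal['type1'], so the next union member wins:
    assert type(demo.myArray[0]).__name__ == 'CreateObjectRequest'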
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/discriminator/with_properties.py 0.45.0-1/tests/data/expected/main/openapi/discriminator/with_properties.py
--- 0.26.4-3/tests/data/expected/main/openapi/discriminator/with_properties.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/discriminator/with_properties.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,44 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_with_properties.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, Optional, Union
+
+from pydantic import BaseModel, Field, RootModel
+
+
+class UserContextVariable(BaseModel):
+    accountId: str = Field(..., description='The account ID of the user.')
+    field_type: str = Field(
+        ..., alias='@type', description='Type of custom context variable.'
+    )
+
+
+class IssueContextVariable(BaseModel):
+    id: Optional[int] = Field(None, description='The issue ID.')
+    key: Optional[str] = Field(None, description='The issue key.')
+    field_type: str = Field(
+        ..., alias='@type', description='Type of custom context variable.'
+    )
+
+
+class CustomContextVariable1(UserContextVariable):
+    field_type: Literal['user'] = Field(
+        ..., alias='@type', description='Type of custom context variable.'
+    )
+
+
+class CustomContextVariable2(IssueContextVariable):
+    field_type: Literal['issue'] = Field(
+        ..., alias='@type', description='Type of custom context variable.'
+    )
+
+
+class CustomContextVariable(
+    RootModel[Union[CustomContextVariable1, CustomContextVariable2]]
+):
+    root: Union[CustomContextVariable1, CustomContextVariable2] = Field(
+        ..., discriminator='field_type'
+    )
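
Here the discriminator lives behind an alias: the Python field is `field_type`, but the wire name is `@type`. pydantic v2 resolves the tag through the alias, so input keyed by `@type` should dispatch correctly; a sketch (the module name `context_vars` is hypothetical):

    from context_vars import CustomContextVariable  # hypothetical module

    var = CustomContextVariable.model_validate({'@type': 'user', 'accountId': 'a1'})
    assert type(var.root).__name__ == 'CustomContextVariable1'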
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/discriminator/without_mapping.py 0.45.0-1/tests/data/expected/main/openapi/discriminator/without_mapping.py
--- 0.26.4-3/tests/data/expected/main/openapi/discriminator/without_mapping.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/discriminator/without_mapping.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,36 @@
+# generated by datamodel-codegen:
+#   filename:  discriminator_without_mapping.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Type(Enum):
+    my_first_object = 'my_first_object'
+    my_second_object = 'my_second_object'
+    my_third_object = 'my_third_object'
+
+
+class ObjectBase(BaseModel):
+    name: Optional[str] = Field(None, description='Name of the object')
+    type: Literal['ObjectBase'] = Field(..., description='Object type')
+
+
+class CreateObjectRequest(ObjectBase):
+    name: str = Field(..., description='Name of the object')
+    type: Literal['CreateObjectRequest'] = Field(..., description='Object type')
+
+
+class UpdateObjectRequest(ObjectBase):
+    type: Literal['UpdateObjectRequest']
+
+
+class Demo(BaseModel):
+    __root__: Union[ObjectBase, CreateObjectRequest, UpdateObjectRequest] = Field(
+        ..., discriminator='type'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/dot_notation_deep_inheritance/__init__.py 0.45.0-1/tests/data/expected/main/openapi/dot_notation_deep_inheritance/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/dot_notation_deep_inheritance/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/dot_notation_deep_inheritance/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  dot_notation_deep_inheritance.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/dot_notation_deep_inheritance/v0/__init__.py 0.45.0-1/tests/data/expected/main/openapi/dot_notation_deep_inheritance/v0/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/dot_notation_deep_inheritance/v0/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/dot_notation_deep_inheritance/v0/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  dot_notation_deep_inheritance.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Animal(BaseModel):
+    species: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/dot_notation_deep_inheritance/v0/mammal/__init__.py 0.45.0-1/tests/data/expected/main/openapi/dot_notation_deep_inheritance/v0/mammal/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/dot_notation_deep_inheritance/v0/mammal/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/dot_notation_deep_inheritance/v0/mammal/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  dot_notation_deep_inheritance.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from .. import Animal
+
+
+class Dog(Animal):
+    breed: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/dot_notation_deep_inheritance/v0/mammal/canine.py 0.45.0-1/tests/data/expected/main/openapi/dot_notation_deep_inheritance/v0/mammal/canine.py
--- 0.26.4-3/tests/data/expected/main/openapi/dot_notation_deep_inheritance/v0/mammal/canine.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/dot_notation_deep_inheritance/v0/mammal/canine.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  dot_notation_deep_inheritance.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from .. import Animal
+
+
+class Puppy(Animal):
+    age_weeks: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/dot_notation_inheritance/__init__.py 0.45.0-1/tests/data/expected/main/openapi/dot_notation_inheritance/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/dot_notation_inheritance/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/dot_notation_inheritance/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  dot_notation_inheritance.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/dot_notation_inheritance/v0/__init__.py 0.45.0-1/tests/data/expected/main/openapi/dot_notation_inheritance/v0/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/dot_notation_inheritance/v0/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/dot_notation_inheritance/v0/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  dot_notation_inheritance.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Properties(BaseModel):
+    name: Optional[str] = None
+
+
+class Animal(BaseModel):
+    species: Optional[str] = None
+    props: Optional[Properties] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/dot_notation_inheritance/v0/animal.py 0.45.0-1/tests/data/expected/main/openapi/dot_notation_inheritance/v0/animal.py
--- 0.26.4-3/tests/data/expected/main/openapi/dot_notation_inheritance/v0/animal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/dot_notation_inheritance/v0/animal.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  dot_notation_inheritance.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from . import Animal
+
+
+class Dog(Animal):
+    breed: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/duplicate_models2.py 0.45.0-1/tests/data/expected/main/openapi/duplicate_models2.py
--- 0.26.4-3/tests/data/expected/main/openapi/duplicate_models2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/duplicate_models2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,46 @@
+# generated by datamodel-codegen:
+#   filename:  duplicate_models2.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, Optional
+
+from pydantic import BaseModel, RootModel
+
+
+class PetType(Enum):
+    pet = 'pet'
+
+
+class PetDetails(BaseModel):
+    race: Optional[str] = None
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    type: PetType
+    details: Optional[PetDetails] = None
+
+
+class CarType(Enum):
+    car = 'car'
+
+
+class CarDetails(BaseModel):
+    brand: Optional[str] = None
+
+
+class Car(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    type: CarType
+    details: Optional[CarDetails] = None
+
+
+class Cars(RootModel[Any]):
+    root: Any
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/empty_dict_default.py 0.45.0-1/tests/data/expected/main/openapi/empty_dict_default.py
--- 0.26.4-3/tests/data/expected/main/openapi/empty_dict_default.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/empty_dict_default.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  empty_dict_default.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class ObjectMeta(BaseModel):
+    name: Optional[str] = None
+    namespace: Optional[str] = None
+
+
+class PodSpec(BaseModel):
+    metadata: Optional[ObjectMeta] = Field(
+        default_factory=lambda: ObjectMeta.parse_obj({})
+    )
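
The `default_factory` wrapping `ObjectMeta.parse_obj({})` is what makes an empty-dict default safe: each `PodSpec` gets its own `ObjectMeta` instead of sharing one mutable instance. A sketch (pydantic v1 API; the module name `pod_models` is hypothetical):

    from pod_models import PodSpec  # hypothetical module for the file above

    a, b = PodSpec(), PodSpec()
    assert a.metadata is not None and a.metadata.name is None
    assert a.metadata is not b.metadata  # fresh object per instance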
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/empty_list_default.py 0.45.0-1/tests/data/expected/main/openapi/empty_list_default.py
--- 0.26.4-3/tests/data/expected/main/openapi/empty_list_default.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/empty_list_default.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  empty_list_default.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class Container(BaseModel):
+    name: Optional[str] = None
+
+
+class PodSpec(BaseModel):
+    containers: Optional[List[Container]] = Field(default_factory=list)
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/enable_command_header.py 0.45.0-1/tests/data/expected/main/openapi/enable_command_header.py
--- 0.26.4-3/tests/data/expected/main/openapi/enable_command_header.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/enable_command_header.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,70 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+#   command:   datamodel-codegen [COMMAND]
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/enable_faux_immutability.py 0.45.0-1/tests/data/expected/main/openapi/enable_faux_immutability.py
--- 0.26.4-3/tests/data/expected/main/openapi/enable_faux_immutability.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/enable_faux_immutability.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,102 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    class Config:
+        allow_mutation = False
+
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    class Config:
+        allow_mutation = False
+
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    class Config:
+        allow_mutation = False
+
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    class Config:
+        allow_mutation = False
+
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    class Config:
+        allow_mutation = False
+
+    __root__: str
+
+
+class Rules(BaseModel):
+    class Config:
+        allow_mutation = False
+
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    class Config:
+        allow_mutation = False
+
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    class Config:
+        allow_mutation = False
+
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    class Config:
+        allow_mutation = False
+
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    class Config:
+        allow_mutation = False
+
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    class Config:
+        allow_mutation = False
+
+    event: Optional[Event] = None
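
Under pydantic v1, `allow_mutation = False` enforces the faux immutability at assignment time with a `TypeError`. A sketch (the module name `api_models` is hypothetical):

    from api_models import Pet  # hypothetical module for the file above

    pet = Pet(id=1, name='Rex')
    try:
        pet.name = 'Max'
    except TypeError:
        pass  # v1 raises TypeError when allow_mutation is False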
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/enable_faux_immutability_pydantic_v2.py 0.45.0-1/tests/data/expected/main/openapi/enable_faux_immutability_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/enable_faux_immutability_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/enable_faux_immutability_pydantic_v2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,102 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, ConfigDict, Field, RootModel
+
+
+class Pet(BaseModel):
+    model_config = ConfigDict(
+        frozen=True,
+    )
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(RootModel[List[Pet]]):
+    model_config = ConfigDict(
+        frozen=True,
+    )
+    root: List[Pet]
+
+
+class User(BaseModel):
+    model_config = ConfigDict(
+        frozen=True,
+    )
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(RootModel[List[User]]):
+    model_config = ConfigDict(
+        frozen=True,
+    )
+    root: List[User]
+
+
+class Id(RootModel[str]):
+    model_config = ConfigDict(
+        frozen=True,
+    )
+    root: str
+
+
+class Rules(RootModel[List[str]]):
+    model_config = ConfigDict(
+        frozen=True,
+    )
+    root: List[str]
+
+
+class Error(BaseModel):
+    model_config = ConfigDict(
+        frozen=True,
+    )
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    model_config = ConfigDict(
+        frozen=True,
+    )
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(RootModel[List[Api]]):
+    model_config = ConfigDict(
+        frozen=True,
+    )
+    root: List[Api]
+
+
+class Event(BaseModel):
+    model_config = ConfigDict(
+        frozen=True,
+    )
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    model_config = ConfigDict(
+        frozen=True,
+    )
+    event: Optional[Event] = None
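
The pydantic v2 rendering of the same option uses `ConfigDict(frozen=True)`, and the failure mode changes: v2 raises a `ValidationError` rather than v1's `TypeError`. A sketch (the module name `api_models_v2` is hypothetical):

    import pydantic
    from api_models_v2 import Pet  # hypothetical module for the file above

    pet = Pet(id=1, name='Rex')
    try:
        pet.name = 'Max'
    except pydantic.ValidationError:
        pass  # frozen=True rejects assignment after construction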
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/enable_version_header.py 0.45.0-1/tests/data/expected/main/openapi/enable_version_header.py
--- 0.26.4-3/tests/data/expected/main/openapi/enable_version_header.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/enable_version_header.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,70 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+#   version:   0.0.0
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/enum_builtin_conflict.py 0.45.0-1/tests/data/expected/main/openapi/enum_builtin_conflict.py
--- 0.26.4-3/tests/data/expected/main/openapi/enum_builtin_conflict.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/enum_builtin_conflict.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  enum_builtin_conflict.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class StringMethodEnum(str, Enum):
+    count_ = 'count'
+    index_ = 'index'
+    format_ = 'format'
+    normal_value = 'normal_value'
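
Because the enum subclasses `str`, member names that would shadow `str` methods (`count`, `index`, `format`) get a trailing underscore while the values stay untouched. A sketch (the module name `status_enums` is hypothetical):

    from status_enums import StringMethodEnum  # hypothetical module

    assert StringMethodEnum.count_.value == 'count'
    assert StringMethodEnum('format') is StringMethodEnum.format_
    assert StringMethodEnum.normal_value.count('_') == 1  # str API still works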
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/enum_models/all.py 0.45.0-1/tests/data/expected/main/openapi/enum_models/all.py
--- 0.26.4-3/tests/data/expected/main/openapi/enum_models/all.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/enum_models/all.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,72 @@
+# generated by datamodel-codegen:
+#   filename:  enum_models.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    kind: Optional[Literal['dog', 'cat']] = None
+    type: Optional[Literal['animal']] = None
+    number: Literal[1]
+    boolean: Literal[True]
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class Animal(BaseModel):
+    kind: Optional[Literal['snake', 'rabbit']] = None
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class EnumObject(BaseModel):
+    type: Optional[Literal['a', 'b']] = None
+
+
+class EnumRoot(BaseModel):
+    __root__: Literal['a', 'b']
+
+
+class IntEnum(BaseModel):
+    __root__: Literal[1, 2]
+
+
+class AliasEnum(BaseModel):
+    __root__: Literal[1, 2, 3]
+
+
+class MultipleTypeEnum(BaseModel):
+    __root__: Literal['red', 'amber', 'green', 42]
+
+
+class SingleEnum(BaseModel):
+    __root__: Literal['pet']
+
+
+class ArrayEnum(BaseModel):
+    __root__: List[Union[Literal['cat'], Literal['dog']]]
+
+
+class NestedNullableEnum(BaseModel):
+    nested_version: Optional[
+        Literal['RC1', 'RC1N', 'RC2', 'RC2N', 'RC3', 'RC4']
+    ] = Field('RC1', description='nullable enum', example='RC2')
+
+
+class Version(BaseModel):
+    __root__: Optional[Literal['RC1', 'RC1N', 'RC2', 'RC2N', 'RC3', 'RC4']] = Field(
+        'RC1', description='nullable enum', example='RC2'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/enum_models/as_literal.py 0.45.0-1/tests/data/expected/main/openapi/enum_models/as_literal.py
--- 0.26.4-3/tests/data/expected/main/openapi/enum_models/as_literal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/enum_models/as_literal.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,72 @@
+# generated by datamodel-codegen:
+#   filename:  enum_models.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    kind: Optional[Literal['dog', 'cat']] = None
+    type: Optional[Literal['animal']] = None
+    number: Literal[1]
+    boolean: Literal[True]
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class Animal(BaseModel):
+    kind: Optional[Literal['snake', 'rabbit']] = None
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class EnumObject(BaseModel):
+    type: Optional[Literal['a', 'b']] = None
+
+
+class EnumRoot(BaseModel):
+    __root__: Literal['a', 'b']
+
+
+class IntEnum(BaseModel):
+    __root__: Literal[1, 2]
+
+
+class AliasEnum(BaseModel):
+    __root__: Literal[1, 2, 3]
+
+
+class MultipleTypeEnum(BaseModel):
+    __root__: Literal['red', 'amber', 'green', 42]
+
+
+class SingleEnum(BaseModel):
+    __root__: Literal['pet']
+
+
+class ArrayEnum(BaseModel):
+    __root__: List[Union[Literal['cat'], Literal['dog']]]
+
+
+class NestedNullableEnum(BaseModel):
+    nested_version: Optional[
+        Literal['RC1', 'RC1N', 'RC2', 'RC2N', 'RC3', 'RC4']
+    ] = Field('RC1', description='nullable enum', example='RC2')
+
+
+class Version(BaseModel):
+    __root__: Optional[Literal['RC1', 'RC1N', 'RC2', 'RC2N', 'RC3', 'RC4']] = Field(
+        'RC1', description='nullable enum', example='RC2'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/enum_models/one.py 0.45.0-1/tests/data/expected/main/openapi/enum_models/one.py
--- 0.26.4-3/tests/data/expected/main/openapi/enum_models/one.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/enum_models/one.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,122 @@
+# generated by datamodel-codegen:
+#   filename:  enum_models.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Kind(Enum):
+    dog = 'dog'
+    cat = 'cat'
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    kind: Optional[Kind] = None
+    type: Optional[Literal['animal']] = None
+    number: Literal[1]
+    boolean: Literal[True]
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class Kind1(Enum):
+    snake = 'snake'
+    rabbit = 'rabbit'
+
+
+class Animal(BaseModel):
+    kind: Optional[Kind1] = None
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Type(Enum):
+    a = 'a'
+    b = 'b'
+
+
+class EnumObject(BaseModel):
+    type: Optional[Type] = None
+
+
+class EnumRoot(Enum):
+    a = 'a'
+    b = 'b'
+
+
+class IntEnum(Enum):
+    number_1 = 1
+    number_2 = 2
+
+
+class AliasEnum(Enum):
+    a = 1
+    b = 2
+    c = 3
+
+
+class MultipleTypeEnum(Enum):
+    red = 'red'
+    amber = 'amber'
+    green = 'green'
+    NoneType_None = None
+    int_42 = 42
+
+
+class SingleEnum(BaseModel):
+    __root__: Literal['pet']
+
+
+class ArrayEnum(BaseModel):
+    __root__: List[Union[Literal['cat'], Literal['dog']]]
+
+
+class NestedVersionEnum(Enum):
+    RC1 = 'RC1'
+    RC1N = 'RC1N'
+    RC2 = 'RC2'
+    RC2N = 'RC2N'
+    RC3 = 'RC3'
+    RC4 = 'RC4'
+
+
+class NestedVersion(BaseModel):
+    __root__: Optional[NestedVersionEnum] = Field(
+        'RC1', description='nullable enum', example='RC2'
+    )
+
+
+class NestedNullableEnum(BaseModel):
+    nested_version: Optional[NestedVersion] = Field(
+        default_factory=lambda: NestedVersion.parse_obj('RC1'),
+        description='nullable enum',
+        example='RC2',
+    )
+
+
+class VersionEnum(Enum):
+    RC1 = 'RC1'
+    RC1N = 'RC1N'
+    RC2 = 'RC2'
+    RC2N = 'RC2N'
+    RC3 = 'RC3'
+    RC4 = 'RC4'
+
+
+class Version(BaseModel):
+    __root__: Optional[VersionEnum] = Field(
+        'RC1', description='nullable enum', example='RC2'
+    )
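
The interesting bit here is `NestedNullableEnum.nested_version`: the `'RC1'` default has to be materialized through `NestedVersion.parse_obj`, so the factory yields a root model whose `__root__` is already the coerced enum member. A sketch (pydantic v1; the module name `version_models` is hypothetical):

    from version_models import NestedNullableEnum, NestedVersionEnum  # hypothetical

    obj = NestedNullableEnum()
    assert obj.nested_version.__root__ is NestedVersionEnum.RC1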
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/enum_models/one_literal_as_default.py 0.45.0-1/tests/data/expected/main/openapi/enum_models/one_literal_as_default.py
--- 0.26.4-3/tests/data/expected/main/openapi/enum_models/one_literal_as_default.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/enum_models/one_literal_as_default.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,122 @@
+# generated by datamodel-codegen:
+#   filename:  enum_models.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Kind(Enum):
+    dog = 'dog'
+    cat = 'cat'
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    kind: Optional[Kind] = None
+    type: Optional[Literal['animal']] = None
+    number: Literal[1] = 1
+    boolean: Literal[True] = True
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class Kind1(Enum):
+    snake = 'snake'
+    rabbit = 'rabbit'
+
+
+class Animal(BaseModel):
+    kind: Optional[Kind1] = None
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Type(Enum):
+    a = 'a'
+    b = 'b'
+
+
+class EnumObject(BaseModel):
+    type: Optional[Type] = None
+
+
+class EnumRoot(Enum):
+    a = 'a'
+    b = 'b'
+
+
+class IntEnum(Enum):
+    number_1 = 1
+    number_2 = 2
+
+
+class AliasEnum(Enum):
+    a = 1
+    b = 2
+    c = 3
+
+
+class MultipleTypeEnum(Enum):
+    red = 'red'
+    amber = 'amber'
+    green = 'green'
+    NoneType_None = None
+    int_42 = 42
+
+
+class SingleEnum(BaseModel):
+    __root__: Literal['pet'] = 'pet'
+
+
+class ArrayEnum(BaseModel):
+    __root__: List[Union[Literal['cat'], Literal['dog']]]
+
+
+class NestedVersionEnum(Enum):
+    RC1 = 'RC1'
+    RC1N = 'RC1N'
+    RC2 = 'RC2'
+    RC2N = 'RC2N'
+    RC3 = 'RC3'
+    RC4 = 'RC4'
+
+
+class NestedVersion(BaseModel):
+    __root__: Optional[NestedVersionEnum] = Field(
+        'RC1', description='nullable enum', example='RC2'
+    )
+
+
+class NestedNullableEnum(BaseModel):
+    nested_version: Optional[NestedVersion] = Field(
+        default_factory=lambda: NestedVersion.parse_obj('RC1'),
+        description='nullable enum',
+        example='RC2',
+    )
+
+
+class VersionEnum(Enum):
+    RC1 = 'RC1'
+    RC1N = 'RC1N'
+    RC2 = 'RC2'
+    RC2N = 'RC2N'
+    RC3 = 'RC3'
+    RC4 = 'RC4'
+
+
+class Version(BaseModel):
+    __root__: Optional[VersionEnum] = Field(
+        'RC1', description='nullable enum', example='RC2'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/enum_specialized.py 0.45.0-1/tests/data/expected/main/openapi/enum_specialized.py
--- 0.26.4-3/tests/data/expected/main/openapi/enum_specialized.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/enum_specialized.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,22 @@
+# generated by datamodel-codegen:
+#   filename:  subclass_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import StrEnum
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class ProcessingStatus(StrEnum):
+    COMPLETED = 'COMPLETED'
+    PENDING = 'PENDING'
+    FAILED = 'FAILED'
+
+
+class ProcessingTask(BaseModel):
+    processing_status: Optional[ProcessingStatus] = Field(
+        'COMPLETED', title='Status of the task'
+    )
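
`enum.StrEnum` (Python 3.11+) keeps plain-string ergonomics: members compare equal to their values and support the full `str` API. A sketch (the module name `tasks` is hypothetical):

    from tasks import ProcessingStatus, ProcessingTask  # hypothetical module

    task = ProcessingTask()
    assert task.processing_status == 'COMPLETED'  # StrEnum member == its value
    assert ProcessingStatus.FAILED.lower() == 'failed'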
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/external_ref_with_transitive_local_ref/output.py 0.45.0-1/tests/data/expected/main/openapi/external_ref_with_transitive_local_ref/output.py
--- 0.26.4-3/tests/data/expected/main/openapi/external_ref_with_transitive_local_ref/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/external_ref_with_transitive_local_ref/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  openapi.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import AwareDatetime, BaseModel
+
+
+class FeedbackItemBase(BaseModel):
+    id: int
+    message: str
+
+
+class FeedbackItemCreate(FeedbackItemBase):
+    user_id: int
+
+
+class FeedbackItem(FeedbackItemBase):
+    created_at: AwareDatetime
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/external_relative_ref/module_openapi/__init__.py 0.45.0-1/tests/data/expected/main/openapi/external_relative_ref/module_openapi/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/external_relative_ref/module_openapi/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/external_relative_ref/module_openapi/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  model_b
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/external_relative_ref/module_openapi/model_a.py 0.45.0-1/tests/data/expected/main/openapi/external_relative_ref/module_openapi/model_a.py
--- 0.26.4-3/tests/data/expected/main/openapi/external_relative_ref/module_openapi/model_a.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/external_relative_ref/module_openapi/model_a.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  module.openapi.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Input(BaseModel):
+    name: Optional[str] = None
+
+
+class Output(BaseModel):
+    output: Optional[str] = None
+    input: Optional[Input] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/external_relative_ref/module_openapi/modules/__init__.py 0.45.0-1/tests/data/expected/main/openapi/external_relative_ref/module_openapi/modules/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/external_relative_ref/module_openapi/modules/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/external_relative_ref/module_openapi/modules/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  model_b
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/external_relative_ref/module_openapi/modules/quality_evaluation.py 0.45.0-1/tests/data/expected/main/openapi/external_relative_ref/module_openapi/modules/quality_evaluation.py
--- 0.26.4-3/tests/data/expected/main/openapi/external_relative_ref/module_openapi/modules/quality_evaluation.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/external_relative_ref/module_openapi/modules/quality_evaluation.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  module.openapi.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import model_a
+
+
+class QualityEvaluationRequest(BaseModel):
+    input: Optional[model_a.Output] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/extra_template_data_config.py 0.45.0-1/tests/data/expected/main/openapi/extra_template_data_config.py
--- 0.26.4-3/tests/data/expected/main/openapi/extra_template_data_config.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/extra_template_data_config.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,72 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):  # 1 2, 1 2, this is just a pet
+    class Config:
+        arbitrary_types_allowed = True
+
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/extra_template_data_config_pydantic_v2.py 0.45.0-1/tests/data/expected/main/openapi/extra_template_data_config_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/extra_template_data_config_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/extra_template_data_config_pydantic_v2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,73 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, ConfigDict, Field, RootModel
+
+
+class Pet(BaseModel):  # 1 2, 1 2, this is just a pet
+    model_config = ConfigDict(
+        arbitrary_types_allowed=True,
+        coerce_numbers_to_str=True,
+    )
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(RootModel[List[Pet]]):
+    root: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(RootModel[List[User]]):
+    root: List[User]
+
+
+class Id(RootModel[str]):
+    root: str
+
+
+class Rules(RootModel[List[str]]):
+    root: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(RootModel[List[Api]]):
+    root: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/force_optional.py 0.45.0-1/tests/data/expected/main/openapi/force_optional.py
--- 0.26.4-3/tests/data/expected/main/openapi/force_optional.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/force_optional.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: Optional[int] = 1
+    name: Optional[str] = None
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: Optional[List[Pet]] = None
+
+
+class User(BaseModel):
+    id: Optional[int] = None
+    name: Optional[str] = None
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: Optional[List[User]] = None
+
+
+class Id(BaseModel):
+    __root__: Optional[str] = None
+
+
+class Rules(BaseModel):
+    __root__: Optional[List[str]] = None
+
+
+class Error(BaseModel):
+    code: Optional[int] = None
+    message: Optional[str] = None
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: Optional[List[Api]] = None
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/general.py 0.45.0-1/tests/data/expected/main/openapi/general.py
--- 0.26.4-3/tests/data/expected/main/openapi/general.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/general.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/http_refs.py 0.45.0-1/tests/data/expected/main/openapi/http_refs.py
--- 0.26.4-3/tests/data/expected/main/openapi/http_refs.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/http_refs.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,35 @@
+# generated by datamodel-codegen:
+#   filename:  https://example.com/refs.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import AnyUrl, BaseModel, Field, conint
+
+
+class Problem(BaseModel):
+    detail: Optional[str] = Field(
+        None,
+        description='A human readable explanation specific to this occurrence of the\nproblem. You MUST NOT expose internal information, personal\ndata or implementation details through this field.\n',
+        example='Request took too long to complete.',
+    )
+    instance: Optional[AnyUrl] = Field(
+        None,
+        description='An absolute URI that identifies the specific occurrence of the problem.\nIt may or may not yield further information if dereferenced.\n',
+    )
+    status: Optional[conint(ge=100, lt=600)] = Field(
+        None,
+        description='The HTTP status code generated by the origin server for this occurrence\nof the problem.\n',
+        example=503,
+    )
+    title: Optional[str] = Field(
+        None,
+        description='A short, summary of the problem type. Written in english and readable\nfor engineers (usually not suited for non technical stakeholders and\nnot localized); example: Service Unavailable\n',
+    )
+    type: Optional[AnyUrl] = Field(
+        'about:blank',
+        description='An absolute URI that identifies the problem type.  When dereferenced,\nit SHOULD provide human-readable documentation for the problem type\n(e.g., using HTML).\n',
+        example='https://tools.ietf.org/html/rfc7231#section-6.6.4',
+    )
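
The `status` field shows how the spec's HTTP status range lands as `conint(ge=100, lt=600)`. A sketch of the constraint (pydantic v1; the module name `problem` is hypothetical):

    import pydantic
    from problem import Problem  # hypothetical module for the file above

    Problem(status=503)  # within [100, 600)
    try:
        Problem(status=99)
    except pydantic.ValidationError:
        pass  # below the ge=100 bound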
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/json_pointer.py 0.45.0-1/tests/data/expected/main/openapi/json_pointer.py
--- 0.26.4-3/tests/data/expected/main/openapi/json_pointer.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/json_pointer.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,26 @@
+# generated by datamodel-codegen:
+#   filename:  json_pointer.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class TestNestedNested(BaseModel):
+    test_nested_nested_string: Optional[str] = None
+
+
+class TestNested(BaseModel):
+    test_string: Optional[str] = None
+    nested_nested: Optional[TestNestedNested] = None
+
+
+class Test(TestNested):
+    pass
+
+
+class Foo(Test):
+    pass
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/max_items_enum.py 0.45.0-1/tests/data/expected/main/openapi/max_items_enum.py
--- 0.26.4-3/tests/data/expected/main/openapi/max_items_enum.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/max_items_enum.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  max_items_enum.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class BarEnum(Enum):
+    hello = 'hello'
+    goodbye = 'goodbye'
+
+
+class Foo(BaseModel):
+    bar: Optional[List[BarEnum]] = Field(None, max_items=3)
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/max_min_number.py 0.45.0-1/tests/data/expected/main/openapi/max_min_number.py
--- 0.26.4-3/tests/data/expected/main/openapi/max_min_number.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/max_min_number.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  max_min_number.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, confloat
+
+
+class Product(BaseModel):
+    price: Optional[confloat(ge=-999999.999999, le=999999.999999)] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from ._internal import DifferentTea, Error, Id, OptionalModel, Result, Source
+
+__all__ = ["DifferentTea", "Error", "Id", "OptionalModel", "Result", "Source"]
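
The modular layout keeps shared definitions in a private `_internal` module and re-exports them from each package `__init__`, so downstream imports stay stable even when the generator deduplicates or renames classes internally. A sketch (assuming the output directory is importable as `modular`):

    from modular import DifferentTea, Id  # re-exported from modular._internal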
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular/_internal.py 0.45.0-1/tests/data/expected/main/openapi/modular/_internal.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular/_internal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular/_internal.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,67 @@
+# generated by datamodel-codegen:
+#   filename:  _internal
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+from . import models
+
+
+class OptionalModel(BaseModel):
+    __root__: str
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Result(BaseModel):
+    event: Optional[models.Event] = None
+
+
+class Source(BaseModel):
+    country: Optional[str] = None
+
+
+class DifferentTea(BaseModel):
+    foo: Optional[Tea] = None
+    nested: Optional[Tea_1] = None
+
+
+class Tea(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+
+
+class Cocoa(BaseModel):
+    quality: Optional[int] = None
+
+
+class Tea_1(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea_1] = None
+    optional: Optional[List[OptionalModel]] = None
+
+
+class TeaClone(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea_1] = None
+    optional: Optional[List[OptionalModel]] = None
+
+
+class ListModel(BaseModel):
+    __root__: List[Tea_1]
+
+
+Tea_1.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular/bar.py 0.45.0-1/tests/data/expected/main/openapi/modular/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class FieldModel(BaseModel):
+    __root__: str = Field(..., example='green')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular/collections.py 0.45.0-1/tests/data/expected/main/openapi/modular/collections.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular/collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular/collections.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,51 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+from . import models
+
+
+class Pets(BaseModel):
+    __root__: List[models.Pet]
+
+
+class Users(BaseModel):
+    __root__: List[models.User]
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Stage(Enum):
+    test = 'test'
+    dev = 'dev'
+    stg = 'stg'
+    prod = 'prod'
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+    stage: Optional[Stage] = None
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular/foo/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular/foo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular/foo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular/foo/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from .._internal import Cocoa, Tea
+
+__all__ = ["Cocoa", "Tea"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular/foo/bar.py 0.45.0-1/tests/data/expected/main/openapi/modular/foo/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular/foo/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular/foo/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Dict, List, Optional
+
+from pydantic import BaseModel
+
+
+class Thing(BaseModel):
+    attributes: Optional[Dict[str, Any]] = None
+
+
+class Thang(BaseModel):
+    attributes: Optional[List[Dict[str, Any]]] = None
+
+
+class Others(BaseModel):
+    name: Optional[str] = None
+
+
+class Clone(Thing):
+    others: Optional[Others] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular/models.py 0.45.0-1/tests/data/expected/main/openapi/modular/models.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular/models.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular/models.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, Dict, List, Optional, Union
+
+from pydantic import BaseModel
+
+
+class Species(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    species: Optional[Species] = None
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Event(BaseModel):
+    name: Optional[Union[str, float, int, bool, Dict[str, Any], List[str]]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular/nested/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular/nested/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular/nested/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular/nested/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular/nested/foo.py 0.45.0-1/tests/data/expected/main/openapi/modular/nested/foo.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular/nested/foo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular/nested/foo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,9 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from .._internal import ListModel
+from .._internal import Tea_1 as Tea
+from .._internal import TeaClone
+
+__all__ = ["ListModel", "Tea", "TeaClone"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular/woo/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular/woo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular/woo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular/woo/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular/woo/boo.py 0.45.0-1/tests/data/expected/main/openapi/modular/woo/boo.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular/woo/boo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular/woo/boo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import bar
+from .._internal import Cocoa, Source
+
+
+class Chocolate(BaseModel):
+    flavour: Optional[str] = None
+    source: Optional[Source] = None
+    cocoa: Optional[Cocoa] = None
+    field: Optional[bar.FieldModel] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,6 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+
+from ._internal import DifferentTea, Error, Id, OptionalModel, Result, Source
+
+__all__ = ["DifferentTea", "Error", "Id", "OptionalModel", "Result", "Source"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children/_internal.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children/_internal.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children/_internal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children/_internal.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,66 @@
+# generated by datamodel-codegen:
+#   filename:  _internal
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+from . import models
+
+
+class OptionalModel(BaseModel):
+    __root__: str
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Result(BaseModel):
+    event: Optional[models.Event] = None
+
+
+class Source(BaseModel):
+    country: Optional[str] = None
+
+
+class DifferentTea(BaseModel):
+    foo: Optional[Tea] = None
+    nested: Optional[Tea_1] = None
+
+
+class Tea(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+
+
+class Cocoa(BaseModel):
+    quality: Optional[int] = None
+
+
+class Tea_1(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea_1] = None
+    optional: Optional[List[OptionalModel]] = None
+
+
+class TeaClone(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea_1] = None
+    optional: Optional[List[OptionalModel]] = None
+
+
+class ListModel(BaseModel):
+    __root__: List[Tea_1]
+
+
+Tea_1.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children/bar.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class FieldModel(BaseModel):
+    __root__: str = Field(..., example='green')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children/collections.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children/collections.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children/collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children/collections.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,50 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+from . import models
+
+
+class Pets(BaseModel):
+    __root__: List[models.Pet]
+
+
+class Users(BaseModel):
+    __root__: List[models.User]
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Stage(Enum):
+    test = 'test'
+    dev = 'dev'
+    stg = 'stg'
+    prod = 'prod'
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+    stage: Optional[Stage] = None
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children/foo/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children/foo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children/foo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children/foo/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,6 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+
+from .._internal import Cocoa, Tea
+
+__all__ = ["Cocoa", "Tea"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children/foo/bar.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children/foo/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children/foo/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children/foo/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+
+from __future__ import annotations
+
+from typing import Any, Dict, List, Optional
+
+from pydantic import BaseModel
+
+
+class Thing(BaseModel):
+    attributes: Optional[Dict[str, Any]] = None
+
+
+class Thang(BaseModel):
+    attributes: Optional[List[Dict[str, Any]]] = None
+
+
+class Others(BaseModel):
+    name: Optional[str] = None
+
+
+class Clone(Thing):
+    others: Optional[Others] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children/models.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children/models.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children/models.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children/models.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, Dict, List, Optional, Union
+
+from pydantic import BaseModel
+
+
+class Species(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    species: Optional[Species] = None
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Event(BaseModel):
+    name: Optional[Union[str, float, int, bool, Dict[str, Any], List[str]]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children/nested/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children/nested/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children/nested/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children/nested/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,2 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children/nested/foo.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children/nested/foo.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children/nested/foo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children/nested/foo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,8 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+
+from .._internal import ListModel
+from .._internal import Tea_1 as Tea
+from .._internal import TeaClone
+
+__all__ = ["ListModel", "Tea", "TeaClone"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children/woo/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children/woo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children/woo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children/woo/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+
+from __future__ import annotations
+
+from .boo import Chocolate
+
+__all__ = [
+    "Chocolate",
+]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children/woo/boo.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children/woo/boo.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children/woo/boo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children/woo/boo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import bar
+from .._internal import Cocoa, Source
+
+
+class Chocolate(BaseModel):
+    flavour: Optional[str] = None
+    source: Optional[Source] = None
+    cocoa: Optional[Cocoa] = None
+    field: Optional[bar.FieldModel] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children_docstring/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children_docstring/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children_docstring/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children_docstring/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+"""
+Custom module docstring header.
+
+This is a multi-line docstring used for testing.
+"""
+
+from __future__ import annotations
+
+from ._internal import DifferentTea, Error, Id, OptionalModel, Result, Source
+
+__all__ = ["DifferentTea", "Error", "Id", "OptionalModel", "Result", "Source"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children_docstring/_internal.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children_docstring/_internal.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children_docstring/_internal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children_docstring/_internal.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,69 @@
+"""
+Custom module docstring header.
+
+This is a multi-line docstring used for testing.
+"""
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+from . import models
+
+
+class OptionalModel(BaseModel):
+    __root__: str
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Result(BaseModel):
+    event: Optional[models.Event] = None
+
+
+class Source(BaseModel):
+    country: Optional[str] = None
+
+
+class DifferentTea(BaseModel):
+    foo: Optional[Tea] = None
+    nested: Optional[Tea_1] = None
+
+
+class Tea(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+
+
+class Cocoa(BaseModel):
+    quality: Optional[int] = None
+
+
+class Tea_1(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea_1] = None
+    optional: Optional[List[OptionalModel]] = None
+
+
+class TeaClone(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea_1] = None
+    optional: Optional[List[OptionalModel]] = None
+
+
+class ListModel(BaseModel):
+    __root__: List[Tea_1]
+
+
+Tea_1.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children_docstring/bar.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children_docstring/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children_docstring/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children_docstring/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+"""
+Custom module docstring header.
+
+This is a multi-line docstring used for testing.
+"""
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class FieldModel(BaseModel):
+    __root__: str = Field(..., example='green')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children_docstring/collections.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children_docstring/collections.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children_docstring/collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children_docstring/collections.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,53 @@
+"""
+Custom module docstring header.
+
+This is a multi-line docstring used for testing.
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+from . import models
+
+
+class Pets(BaseModel):
+    __root__: List[models.Pet]
+
+
+class Users(BaseModel):
+    __root__: List[models.User]
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Stage(Enum):
+    test = 'test'
+    dev = 'dev'
+    stg = 'stg'
+    prod = 'prod'
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+    stage: Optional[Stage] = None
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children_docstring/foo/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children_docstring/foo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children_docstring/foo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children_docstring/foo/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+"""
+Custom module docstring header.
+
+This is a multi-line docstring used for testing.
+"""
+
+from __future__ import annotations
+
+from .._internal import Cocoa, Tea
+
+__all__ = ["Cocoa", "Tea"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children_docstring/foo/bar.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children_docstring/foo/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children_docstring/foo/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children_docstring/foo/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,27 @@
+"""
+Custom module docstring header.
+
+This is a multi-line docstring used for testing.
+"""
+
+from __future__ import annotations
+
+from typing import Any, Dict, List, Optional
+
+from pydantic import BaseModel
+
+
+class Thing(BaseModel):
+    attributes: Optional[Dict[str, Any]] = None
+
+
+class Thang(BaseModel):
+    attributes: Optional[List[Dict[str, Any]]] = None
+
+
+class Others(BaseModel):
+    name: Optional[str] = None
+
+
+class Clone(Thing):
+    others: Optional[Others] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children_docstring/models.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children_docstring/models.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children_docstring/models.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children_docstring/models.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,35 @@
+"""
+Custom module docstring header.
+
+This is a multi-line docstring used for testing.
+"""
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, Dict, List, Optional, Union
+
+from pydantic import BaseModel
+
+
+class Species(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    species: Optional[Species] = None
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Event(BaseModel):
+    name: Optional[Union[str, float, int, bool, Dict[str, Any], List[str]]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children_docstring/nested/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children_docstring/nested/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children_docstring/nested/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children_docstring/nested/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,6 @@
+"""
+Custom module docstring header.
+
+This is a multi-line docstring used for testing.
+"""
+
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children_docstring/nested/foo.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children_docstring/nested/foo.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children_docstring/nested/foo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children_docstring/nested/foo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+"""
+Custom module docstring header.
+
+This is a multi-line docstring used for testing.
+"""
+
+from __future__ import annotations
+
+from .._internal import ListModel
+from .._internal import Tea_1 as Tea
+from .._internal import TeaClone
+
+__all__ = ["ListModel", "Tea", "TeaClone"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children_docstring/woo/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children_docstring/woo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children_docstring/woo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children_docstring/woo/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+"""
+Custom module docstring header.
+
+This is a multi-line docstring used for testing.
+"""
+
+from __future__ import annotations
+
+from .boo import Chocolate
+
+__all__ = [
+    "Chocolate",
+]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children_docstring/woo/boo.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children_docstring/woo/boo.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_children_docstring/woo/boo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_children_docstring/woo/boo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+"""
+Custom module docstring header.
+
+This is a multi-line docstring used for testing.
+"""
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import bar
+from .._internal import Cocoa, Source
+
+
+class Chocolate(BaseModel):
+    flavour: Optional[str] = None
+    source: Optional[Source] = None
+    cocoa: Optional[Cocoa] = None
+    field: Optional[bar.FieldModel] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,6 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+
+from ._internal import DifferentTea, Error, Id, OptionalModel, Result, Source
+
+__all__ = ["DifferentTea", "Error", "Id", "OptionalModel", "Result", "Source"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive/_internal.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive/_internal.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive/_internal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive/_internal.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,66 @@
+# generated by datamodel-codegen:
+#   filename:  _internal
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+from . import models
+
+
+class OptionalModel(BaseModel):
+    __root__: str
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Result(BaseModel):
+    event: Optional[models.Event] = None
+
+
+class Source(BaseModel):
+    country: Optional[str] = None
+
+
+class DifferentTea(BaseModel):
+    foo: Optional[Tea] = None
+    nested: Optional[Tea_1] = None
+
+
+class Tea(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+
+
+class Cocoa(BaseModel):
+    quality: Optional[int] = None
+
+
+class Tea_1(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea_1] = None
+    optional: Optional[List[OptionalModel]] = None
+
+
+class TeaClone(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea_1] = None
+    optional: Optional[List[OptionalModel]] = None
+
+
+class ListModel(BaseModel):
+    __root__: List[Tea_1]
+
+
+Tea_1.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive/bar.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class FieldModel(BaseModel):
+    __root__: str = Field(..., example='green')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive/collections.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive/collections.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive/collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive/collections.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,50 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+from . import models
+
+
+class Pets(BaseModel):
+    __root__: List[models.Pet]
+
+
+class Users(BaseModel):
+    __root__: List[models.User]
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Stage(Enum):
+    test = 'test'
+    dev = 'dev'
+    stg = 'stg'
+    prod = 'prod'
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+    stage: Optional[Stage] = None
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive/foo/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive/foo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive/foo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive/foo/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,6 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+
+from .._internal import Cocoa, Tea
+
+__all__ = ["Cocoa", "Tea"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive/foo/bar.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive/foo/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive/foo/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive/foo/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+
+from __future__ import annotations
+
+from typing import Any, Dict, List, Optional
+
+from pydantic import BaseModel
+
+
+class Thing(BaseModel):
+    attributes: Optional[Dict[str, Any]] = None
+
+
+class Thang(BaseModel):
+    attributes: Optional[List[Dict[str, Any]]] = None
+
+
+class Others(BaseModel):
+    name: Optional[str] = None
+
+
+class Clone(Thing):
+    others: Optional[Others] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive/models.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive/models.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive/models.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive/models.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, Dict, List, Optional, Union
+
+from pydantic import BaseModel
+
+
+class Species(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    species: Optional[Species] = None
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Event(BaseModel):
+    name: Optional[Union[str, float, int, bool, Dict[str, Any], List[str]]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive/nested/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive/nested/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive/nested/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive/nested/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,2 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive/nested/foo.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive/nested/foo.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive/nested/foo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive/nested/foo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,8 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+
+from .._internal import ListModel
+from .._internal import Tea_1 as Tea
+from .._internal import TeaClone
+
+__all__ = ["ListModel", "Tea", "TeaClone"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive/woo/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive/woo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive/woo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive/woo/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+
+from __future__ import annotations
+
+from .boo import Chocolate
+
+__all__ = [
+    "Chocolate",
+]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive/woo/boo.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive/woo/boo.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive/woo/boo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive/woo/boo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import bar
+from .._internal import Cocoa, Source
+
+
+class Chocolate(BaseModel):
+    flavour: Optional[str] = None
+    source: Optional[Source] = None
+    cocoa: Optional[Cocoa] = None
+    field: Optional[bar.FieldModel] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,6 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+
+from ._internal import DifferentTea, Error, Id, OptionalModel, Result, Source
+
+__all__ = ["DifferentTea", "Error", "Id", "OptionalModel", "Result", "Source"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/_internal.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/_internal.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/_internal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/_internal.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,66 @@
+# generated by datamodel-codegen:
+#   filename:  _internal
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+from . import models
+
+
+class OptionalModel(BaseModel):
+    __root__: str
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Result(BaseModel):
+    event: Optional[models.Event] = None
+
+
+class Source(BaseModel):
+    country: Optional[str] = None
+
+
+class DifferentTea(BaseModel):
+    foo: Optional[Tea] = None
+    nested: Optional[Tea_1] = None
+
+
+class Tea(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+
+
+class Cocoa(BaseModel):
+    quality: Optional[int] = None
+
+
+class Tea_1(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea_1] = None
+    optional: Optional[List[OptionalModel]] = None
+
+
+class TeaClone(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea_1] = None
+    optional: Optional[List[OptionalModel]] = None
+
+
+class ListModel(BaseModel):
+    __root__: List[Tea_1]
+
+
+Tea_1.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/bar.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class FieldModel(BaseModel):
+    __root__: str = Field(..., example='green')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/collections.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/collections.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/collections.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,50 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+from . import models
+
+
+class Pets(BaseModel):
+    __root__: List[models.Pet]
+
+
+class Users(BaseModel):
+    __root__: List[models.User]
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Stage(Enum):
+    test = 'test'
+    dev = 'dev'
+    stg = 'stg'
+    prod = 'prod'
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+    stage: Optional[Stage] = None
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/foo/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/foo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/foo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/foo/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,6 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+
+from .._internal import Cocoa, Tea
+
+__all__ = ["Cocoa", "Tea"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/foo/bar.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/foo/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/foo/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/foo/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+
+from __future__ import annotations
+
+from typing import Any, Dict, List, Optional
+
+from pydantic import BaseModel
+
+
+class Thing(BaseModel):
+    attributes: Optional[Dict[str, Any]] = None
+
+
+class Thang(BaseModel):
+    attributes: Optional[List[Dict[str, Any]]] = None
+
+
+class Others(BaseModel):
+    name: Optional[str] = None
+
+
+class Clone(Thing):
+    others: Optional[Others] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/models.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/models.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/models.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/models.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, Dict, List, Optional, Union
+
+from pydantic import BaseModel
+
+
+class Species(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    species: Optional[Species] = None
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Event(BaseModel):
+    name: Optional[Union[str, float, int, bool, Dict[str, Any], List[str]]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/nested/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/nested/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/nested/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/nested/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,2 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/nested/foo.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/nested/foo.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/nested/foo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/nested/foo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,8 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+
+from .._internal import ListModel
+from .._internal import Tea_1 as Tea
+from .._internal import TeaClone
+
+__all__ = ["ListModel", "Tea", "TeaClone"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/woo/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/woo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/woo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/woo/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+
+from __future__ import annotations
+
+from .boo import Chocolate
+
+__all__ = [
+    "Chocolate",
+]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/woo/boo.py 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/woo/boo.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/woo/boo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_all_exports_recursive_full_prefix/woo/boo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import bar
+from .._internal import Cocoa, Source
+
+
+class Chocolate(BaseModel):
+    flavour: Optional[str] = None
+    source: Optional[Source] = None
+    cocoa: Optional[Cocoa] = None
+    field: Optional[bar.FieldModel] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular_custom_class_name/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_custom_class_name/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from ._internal import (
+    CustomDifferentTea,
+    CustomError,
+    CustomId,
+    CustomOptional,
+    CustomResult,
+    CustomSource,
+)
+
+__all__ = [
+    "CustomDifferentTea",
+    "CustomError",
+    "CustomId",
+    "CustomOptional",
+    "CustomResult",
+    "CustomSource",
+]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/_internal.py 0.45.0-1/tests/data/expected/main/openapi/modular_custom_class_name/_internal.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/_internal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_custom_class_name/_internal.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,67 @@
+# generated by datamodel-codegen:
+#   filename:  _internal
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+from . import models
+
+
+class CustomOptional(BaseModel):
+    __root__: str
+
+
+class CustomId(BaseModel):
+    __root__: str
+
+
+class CustomError(BaseModel):
+    code: int
+    message: str
+
+
+class CustomResult(BaseModel):
+    event: Optional[models.CustomEvent] = None
+
+
+class CustomSource(BaseModel):
+    country: Optional[str] = None
+
+
+class CustomDifferentTea(BaseModel):
+    foo: Optional[CustomTea] = None
+    nested: Optional[CustomTea_1] = None
+
+
+class CustomTea(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[CustomId] = None
+
+
+class CustomCocoa(BaseModel):
+    quality: Optional[int] = None
+
+
+class CustomTea_1(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[CustomId] = None
+    self: Optional[CustomTea_1] = None
+    optional: Optional[List[CustomOptional]] = None
+
+
+class CustomTeaClone(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[CustomId] = None
+    self: Optional[CustomTea_1] = None
+    optional: Optional[List[CustomOptional]] = None
+
+
+class CustomList(BaseModel):
+    __root__: List[CustomTea_1]
+
+
+CustomTea_1.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/bar.py 0.45.0-1/tests/data/expected/main/openapi/modular_custom_class_name/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_custom_class_name/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class CustomField(BaseModel):
+    __root__: str = Field(..., example='green')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/collections.py 0.45.0-1/tests/data/expected/main/openapi/modular_custom_class_name/collections.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_custom_class_name/collections.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,51 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+from . import models
+
+
+class CustomPets(BaseModel):
+    __root__: List[models.CustomPet]
+
+
+class CustomUsers(BaseModel):
+    __root__: List[models.CustomUser]
+
+
+class CustomRules(BaseModel):
+    __root__: List[str]
+
+
+class CustomStage(Enum):
+    test = 'test'
+    dev = 'dev'
+    stg = 'stg'
+    prod = 'prod'
+
+
+class CustomApi(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+    stage: Optional[CustomStage] = None
+
+
+class CustomApis(BaseModel):
+    __root__: List[CustomApi]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/foo/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular_custom_class_name/foo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/foo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_custom_class_name/foo/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from .._internal import CustomCocoa, CustomTea
+
+__all__ = ["CustomCocoa", "CustomTea"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/foo/bar.py 0.45.0-1/tests/data/expected/main/openapi/modular_custom_class_name/foo/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/foo/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_custom_class_name/foo/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Dict, List, Optional
+
+from pydantic import BaseModel
+
+
+class CustomThing(BaseModel):
+    attributes: Optional[Dict[str, Any]] = None
+
+
+class CustomThang(BaseModel):
+    attributes: Optional[List[Dict[str, Any]]] = None
+
+
+class CustomOthers(BaseModel):
+    name: Optional[str] = None
+
+
+class CustomClone(CustomThing):
+    others: Optional[CustomOthers] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/models.py 0.45.0-1/tests/data/expected/main/openapi/modular_custom_class_name/models.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/models.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_custom_class_name/models.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, Dict, List, Optional, Union
+
+from pydantic import BaseModel
+
+
+class CustomSpecies(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class CustomPet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    species: Optional[CustomSpecies] = None
+
+
+class CustomUser(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class CustomEvent(BaseModel):
+    name: Optional[Union[str, float, int, bool, Dict[str, Any], List[str]]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/nested/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular_custom_class_name/nested/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/nested/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_custom_class_name/nested/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/nested/foo.py 0.45.0-1/tests/data/expected/main/openapi/modular_custom_class_name/nested/foo.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/nested/foo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_custom_class_name/nested/foo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,9 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from .._internal import CustomList
+from .._internal import CustomTea_1 as CustomTea
+from .._internal import CustomTeaClone
+
+__all__ = ["CustomList", "CustomTea", "CustomTeaClone"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/woo/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular_custom_class_name/woo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/woo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_custom_class_name/woo/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/woo/boo.py 0.45.0-1/tests/data/expected/main/openapi/modular_custom_class_name/woo/boo.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_custom_class_name/woo/boo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_custom_class_name/woo/boo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import bar
+from .._internal import CustomCocoa, CustomSource
+
+
+class CustomChocolate(BaseModel):
+    flavour: Optional[str] = None
+    source: Optional[CustomSource] = None
+    cocoa: Optional[CustomCocoa] = None
+    field: Optional[bar.CustomField] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular_reuse_model/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_reuse_model/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from ._internal import DifferentTea, Error, Id, OptionalModel, Result, Source
+
+__all__ = ["DifferentTea", "Error", "Id", "OptionalModel", "Result", "Source"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/_internal.py 0.45.0-1/tests/data/expected/main/openapi/modular_reuse_model/_internal.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/_internal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_reuse_model/_internal.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,65 @@
+# generated by datamodel-codegen:
+#   filename:  _internal
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+from . import models
+
+
+class OptionalModel(BaseModel):
+    __root__: str
+
+
+class Id(OptionalModel):
+    pass
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Result(BaseModel):
+    event: Optional[models.Event] = None
+
+
+class Source(BaseModel):
+    country: Optional[str] = None
+
+
+class DifferentTea(BaseModel):
+    foo: Optional[Tea] = None
+    nested: Optional[Tea_1] = None
+
+
+class Tea(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+
+
+class Cocoa(BaseModel):
+    quality: Optional[int] = None
+
+
+class Tea_1(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea_1] = None
+    optional: Optional[List[OptionalModel]] = None
+
+
+class TeaClone(Tea_1):
+    pass
+
+
+class ListModel(BaseModel):
+    __root__: List[Tea_1]
+
+
+Tea_1.update_forward_refs()
+TeaClone.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/bar.py 0.45.0-1/tests/data/expected/main/openapi/modular_reuse_model/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_reuse_model/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class FieldModel(BaseModel):
+    __root__: str = Field(..., example='green')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/collections.py 0.45.0-1/tests/data/expected/main/openapi/modular_reuse_model/collections.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_reuse_model/collections.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,51 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+from . import models
+
+
+class Pets(BaseModel):
+    __root__: List[models.Pet]
+
+
+class Users(BaseModel):
+    __root__: List[models.User]
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Stage(Enum):
+    test = 'test'
+    dev = 'dev'
+    stg = 'stg'
+    prod = 'prod'
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+    stage: Optional[Stage] = None
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/foo/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular_reuse_model/foo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/foo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_reuse_model/foo/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from .._internal import Cocoa, Tea
+
+__all__ = ["Cocoa", "Tea"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/foo/bar.py 0.45.0-1/tests/data/expected/main/openapi/modular_reuse_model/foo/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/foo/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_reuse_model/foo/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Dict, List, Optional
+
+from pydantic import BaseModel
+
+
+class Thing(BaseModel):
+    attributes: Optional[Dict[str, Any]] = None
+
+
+class Thang(BaseModel):
+    attributes: Optional[List[Dict[str, Any]]] = None
+
+
+class Others(BaseModel):
+    name: Optional[str] = None
+
+
+class Clone(Thing):
+    others: Optional[Others] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/models.py 0.45.0-1/tests/data/expected/main/openapi/modular_reuse_model/models.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/models.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_reuse_model/models.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, Dict, List, Optional, Union
+
+from pydantic import BaseModel
+
+
+class Species(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    species: Optional[Species] = None
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Event(BaseModel):
+    name: Optional[Union[str, float, int, bool, Dict[str, Any], List[str]]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/nested/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular_reuse_model/nested/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/nested/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_reuse_model/nested/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/nested/foo.py 0.45.0-1/tests/data/expected/main/openapi/modular_reuse_model/nested/foo.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/nested/foo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_reuse_model/nested/foo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,9 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from .._internal import ListModel
+from .._internal import Tea_1 as Tea
+from .._internal import TeaClone
+
+__all__ = ["ListModel", "Tea", "TeaClone"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/woo/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular_reuse_model/woo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/woo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_reuse_model/woo/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/woo/boo.py 0.45.0-1/tests/data/expected/main/openapi/modular_reuse_model/woo/boo.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_reuse_model/woo/boo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_reuse_model/woo/boo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import bar
+from .._internal import Cocoa, Source
+
+
+class Chocolate(BaseModel):
+    flavour: Optional[str] = None
+    source: Optional[Source] = None
+    cocoa: Optional[Cocoa] = None
+    field: Optional[bar.FieldModel] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular_typed_dict/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_typed_dict/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from ._internal import DifferentTea, Error, Id, Optional, Result, Source
+
+__all__ = ["DifferentTea", "Error", "Id", "Optional", "Result", "Source"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/_internal.py 0.45.0-1/tests/data/expected/main/openapi/modular_typed_dict/_internal.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/_internal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_typed_dict/_internal.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,58 @@
+# generated by datamodel-codegen:
+#   filename:  _internal
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import List, NotRequired, TypeAlias, TypedDict
+
+from . import models
+
+Optional: TypeAlias = str
+
+
+Id: TypeAlias = str
+
+
+class Error(TypedDict):
+    code: int
+    message: str
+
+
+class Result(TypedDict):
+    event: NotRequired[models.Event]
+
+
+class Source(TypedDict):
+    country: NotRequired[str]
+
+
+class DifferentTea(TypedDict):
+    foo: NotRequired[Tea]
+    nested: NotRequired[Tea_1]
+
+
+class Tea(TypedDict):
+    flavour: NotRequired[str]
+    id: NotRequired[Id]
+
+
+class Cocoa(TypedDict):
+    quality: NotRequired[int]
+
+
+class Tea_1(TypedDict):
+    flavour: NotRequired[str]
+    id: NotRequired[Id]
+    self: NotRequired[Tea_1]
+    optional: NotRequired[List[Optional]]
+
+
+class TeaClone(TypedDict):
+    flavour: NotRequired[str]
+    id: NotRequired[Id]
+    self: NotRequired[Tea_1]
+    optional: NotRequired[List[Optional]]
+
+
+ListModel: TypeAlias = List[Tea_1]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/bar.py 0.45.0-1/tests/data/expected/main/openapi/modular_typed_dict/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_typed_dict/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,9 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import TypeAlias
+
+Field: TypeAlias = str
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/collections.py 0.45.0-1/tests/data/expected/main/openapi/modular_typed_dict/collections.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_typed_dict/collections.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,28 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Literal, NotRequired, TypeAlias, TypedDict
+
+from . import models
+
+Pets: TypeAlias = List[models.Pet]
+
+
+Users: TypeAlias = List[models.User]
+
+
+Rules: TypeAlias = List[str]
+
+
+class Api(TypedDict):
+    apiKey: NotRequired[str]
+    apiVersionNumber: NotRequired[str]
+    apiUrl: NotRequired[str]
+    apiDocumentationUrl: NotRequired[str]
+    stage: NotRequired[Literal['test', 'dev', 'stg', 'prod']]
+
+
+Apis: TypeAlias = List[Api]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/foo/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular_typed_dict/foo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/foo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_typed_dict/foo/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from .._internal import Cocoa, Tea
+
+__all__ = ["Cocoa", "Tea"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/foo/bar.py 0.45.0-1/tests/data/expected/main/openapi/modular_typed_dict/foo/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/foo/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_typed_dict/foo/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,23 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Dict, List, NotRequired, TypedDict
+
+
+class Thing(TypedDict):
+    attributes: NotRequired[Dict[str, Any]]
+
+
+class Thang(TypedDict):
+    attributes: NotRequired[List[Dict[str, Any]]]
+
+
+class Others(TypedDict):
+    name: NotRequired[str]
+
+
+class Clone(Thing):
+    others: NotRequired[Others]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/models.py 0.45.0-1/tests/data/expected/main/openapi/modular_typed_dict/models.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/models.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_typed_dict/models.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,26 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Dict, List, Literal, NotRequired, TypeAlias, TypedDict, Union
+
+Species: TypeAlias = Literal['dog', 'cat', 'snake']
+
+
+class Pet(TypedDict):
+    id: int
+    name: str
+    tag: NotRequired[str]
+    species: NotRequired[Species]
+
+
+class User(TypedDict):
+    id: int
+    name: str
+    tag: NotRequired[str]
+
+
+class Event(TypedDict):
+    name: NotRequired[Union[str, float, int, bool, Dict[str, Any], List[str]]]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/nested/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular_typed_dict/nested/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/nested/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_typed_dict/nested/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/nested/foo.py 0.45.0-1/tests/data/expected/main/openapi/modular_typed_dict/nested/foo.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/nested/foo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_typed_dict/nested/foo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,9 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from .._internal import ListModel
+from .._internal import Tea_1 as Tea
+from .._internal import TeaClone
+
+__all__ = ["ListModel", "Tea", "TeaClone"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/woo/__init__.py 0.45.0-1/tests/data/expected/main/openapi/modular_typed_dict/woo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/woo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_typed_dict/woo/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/woo/boo.py 0.45.0-1/tests/data/expected/main/openapi/modular_typed_dict/woo/boo.py
--- 0.26.4-3/tests/data/expected/main/openapi/modular_typed_dict/woo/boo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/modular_typed_dict/woo/boo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import NotRequired, TypedDict
+
+from .. import bar
+from .._internal import Cocoa, Source
+
+
+class Chocolate(TypedDict):
+    flavour: NotRequired[str]
+    source: NotRequired[Source]
+    cocoa: NotRequired[Cocoa]
+    field: NotRequired[bar.Field]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/module_class_name_collision/A.py 0.45.0-1/tests/data/expected/main/openapi/module_class_name_collision/A.py
--- 0.26.4-3/tests/data/expected/main/openapi/module_class_name_collision/A.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/module_class_name_collision/A.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  openapi.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class A(BaseModel):
+    name: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/module_class_name_collision/__init__.py 0.45.0-1/tests/data/expected/main/openapi/module_class_name_collision/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/module_class_name_collision/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/module_class_name_collision/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  openapi.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List
+
+from pydantic import RootModel
+
+from .A import A
+
+
+class AGetResponse(RootModel[List[A]]):
+    root: List[A]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/module_class_name_collision_deep/A/B.py 0.45.0-1/tests/data/expected/main/openapi/module_class_name_collision_deep/A/B.py
--- 0.26.4-3/tests/data/expected/main/openapi/module_class_name_collision_deep/A/B.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/module_class_name_collision_deep/A/B.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  openapi.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class B(BaseModel):
+    value: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/module_class_name_collision_deep/A/__init__.py 0.45.0-1/tests/data/expected/main/openapi/module_class_name_collision_deep/A/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/module_class_name_collision_deep/A/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/module_class_name_collision_deep/A/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  openapi.json
+#   timestamp: 2019-07-26T00:00:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/module_class_name_collision_deep/__init__.py 0.45.0-1/tests/data/expected/main/openapi/module_class_name_collision_deep/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/module_class_name_collision_deep/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/module_class_name_collision_deep/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  openapi.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List
+
+from pydantic import RootModel
+
+from .A.B import B
+
+
+class BGetResponse(RootModel[List[B]]):
+    root: List[B]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/msgspec_anyof.py 0.45.0-1/tests/data/expected/main/openapi/msgspec_anyof.py
--- 0.26.4-3/tests/data/expected/main/openapi/msgspec_anyof.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/msgspec_anyof.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,63 @@
+# generated by datamodel-codegen:
+#   filename:  anyof.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, Dict, List, Union
+
+from msgspec import UNSET, Meta, Struct, UnsetType
+from typing_extensions import TypeAlias
+
+
+class Pet(Struct):
+    id: int
+    name: str
+    tag: Union[str, UnsetType] = UNSET
+
+
+class Car(Struct):
+    id: int
+    name: str
+    tag: Union[str, UnsetType] = UNSET
+
+
+class AnyOfItem1(Struct):
+    name: Union[str, UnsetType] = UNSET
+
+
+AnyOfItem2: TypeAlias = Annotated[str, Meta(max_length=5000)]
+
+
+AnyOfItem: TypeAlias = Union[Pet, Car, AnyOfItem1, AnyOfItem2]
+
+
+class Item(Struct):
+    name: Union[str, UnsetType] = UNSET
+
+
+Item1: TypeAlias = Annotated[str, Meta(max_length=5000)]
+
+
+class AnyOfobj(Struct):
+    item: Union[Pet, Car, Item, Item1, UnsetType] = UNSET
+
+
+class AnyOfArray1(Struct):
+    name: Union[str, UnsetType] = UNSET
+    birthday: Union[str, UnsetType] = UNSET
+
+
+AnyOfArray2: TypeAlias = Annotated[str, Meta(max_length=5000)]
+
+
+AnyOfArray: TypeAlias = List[Union[Pet, Car, AnyOfArray1, AnyOfArray2]]
+
+
+class Error(Struct):
+    code: int
+    message: str
+
+
+class Config(Struct):
+    setting: Union[Dict[str, Union[str, List[str]]], UnsetType] = UNSET
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/msgspec_default_object/Another.py 0.45.0-1/tests/data/expected/main/openapi/msgspec_default_object/Another.py
--- 0.26.4-3/tests/data/expected/main/openapi/msgspec_default_object/Another.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/msgspec_default_object/Another.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+# generated by datamodel-codegen:
+#   filename:  default_object.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Union
+
+from msgspec import Struct, UnsetType, convert, field
+from typing_extensions import TypeAlias
+
+from . import Foo as Foo_1
+from . import Nested
+
+Foo: TypeAlias = str
+
+
+class Bar(Struct):
+    original_foo: Union[Foo_1, UnsetType] = field(
+        default_factory=lambda: convert({'text': 'abc', 'number': 123}, type=Foo_1)
+    )
+    nested_foo: Union[List[Nested.Foo], UnsetType] = field(
+        default_factory=lambda: convert(['abc', 'efg'], type=list[Nested.Foo])
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/msgspec_default_object/Nested.py 0.45.0-1/tests/data/expected/main/openapi/msgspec_default_object/Nested.py
--- 0.26.4-3/tests/data/expected/main/openapi/msgspec_default_object/Nested.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/msgspec_default_object/Nested.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,27 @@
+# generated by datamodel-codegen:
+#   filename:  default_object.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Union
+
+from msgspec import Struct, UnsetType, convert, field
+from typing_extensions import TypeAlias
+
+from . import Foo as Foo_1
+
+Foo: TypeAlias = str
+
+
+class Bar(Struct):
+    foo: Union[Foo_1, UnsetType] = field(
+        default_factory=lambda: convert({'text': 'abc', 'number': 123}, type=Foo_1)
+    )
+    baz: Union[List[Foo_1], UnsetType] = field(
+        default_factory=lambda: convert(
+            [{'text': 'abc', 'number': 123}, {'text': 'efg', 'number': 456}],
+            type=list[Foo_1],
+        )
+    )
+    nested_foo: Union[Foo, UnsetType] = 'default foo'
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/msgspec_default_object/__init__.py 0.45.0-1/tests/data/expected/main/openapi/msgspec_default_object/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/msgspec_default_object/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/msgspec_default_object/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,26 @@
+# generated by datamodel-codegen:
+#   filename:  default_object.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Union
+
+from msgspec import UNSET, Struct, UnsetType, convert, field
+
+
+class Foo(Struct):
+    text: Union[str, UnsetType] = '987'
+    number: Union[float, UnsetType] = UNSET
+
+
+class Bar(Struct):
+    foo: Union[Foo, UnsetType] = field(
+        default_factory=lambda: convert({'text': 'abc', 'number': 123}, type=Foo)
+    )
+    baz: Union[List[Foo], UnsetType] = field(
+        default_factory=lambda: convert(
+            [{'text': 'abc', 'number': 123}, {'text': 'efg', 'number': 456}],
+            type=list[Foo],
+        )
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/msgspec_empty_dict_default.py 0.45.0-1/tests/data/expected/main/openapi/msgspec_empty_dict_default.py
--- 0.26.4-3/tests/data/expected/main/openapi/msgspec_empty_dict_default.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/msgspec_empty_dict_default.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  empty_dict_default.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Union
+
+from msgspec import UNSET, Struct, UnsetType, field
+
+
+class ObjectMeta(Struct):
+    name: Union[str, UnsetType] = UNSET
+    namespace: Union[str, UnsetType] = UNSET
+
+
+class PodSpec(Struct):
+    metadata: Union[ObjectMeta, UnsetType] = field(default_factory=dict)
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/msgspec_keyword_only.py 0.45.0-1/tests/data/expected/main/openapi/msgspec_keyword_only.py
--- 0.26.4-3/tests/data/expected/main/openapi/msgspec_keyword_only.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/msgspec_keyword_only.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  inheritance.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Union
+
+from msgspec import UNSET, Struct, UnsetType
+
+
+class Base(Struct, kw_only=True):
+    id: str
+    createdAt: Union[str, UnsetType] = UNSET
+    version: Union[float, UnsetType] = 1
+
+
+class Child(Base, kw_only=True):
+    title: str
+    url: Union[str, UnsetType] = 'https://example.com'
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/msgspec_keyword_only_omit_defaults.py 0.45.0-1/tests/data/expected/main/openapi/msgspec_keyword_only_omit_defaults.py
--- 0.26.4-3/tests/data/expected/main/openapi/msgspec_keyword_only_omit_defaults.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/msgspec_keyword_only_omit_defaults.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  inheritance.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Union
+
+from msgspec import UNSET, Struct, UnsetType
+
+
+class Base(Struct, omit_defaults=True, kw_only=True):
+    id: str
+    createdAt: Union[str, UnsetType] = UNSET
+    version: Union[float, UnsetType] = 1
+
+
+class Child(Base, omit_defaults=True, kw_only=True):
+    title: str
+    url: Union[str, UnsetType] = 'https://example.com'
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/msgspec_mutual_type_alias.py 0.45.0-1/tests/data/expected/main/openapi/msgspec_mutual_type_alias.py
--- 0.26.4-3/tests/data/expected/main/openapi/msgspec_mutual_type_alias.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/msgspec_mutual_type_alias.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  type_alias_mutual_recursive.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import TypeAlias, Union
+
+NodeA: TypeAlias = Union[int, "NodeB"]
+
+
+NodeB: TypeAlias = Union[str, NodeA]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/msgspec_nullable.py 0.45.0-1/tests/data/expected/main/openapi/msgspec_nullable.py
--- 0.26.4-3/tests/data/expected/main/openapi/msgspec_nullable.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/msgspec_nullable.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,81 @@
+# generated by datamodel-codegen:
+#   filename:  nullable.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, List, Optional, TypeAlias, Union
+
+from msgspec import UNSET, Meta, Struct, UnsetType, field
+
+
+class Cursors(Struct):
+    prev: str
+    index: float
+    next: Union[str, UnsetType] = 'last'
+    tag: Union[str, UnsetType] = UNSET
+
+
+class TopLevel(Struct):
+    cursors: Cursors
+
+
+class Info(Struct):
+    name: str
+
+
+class User(Struct):
+    info: Info
+
+
+class Api(Struct):
+    apiKey: Union[
+        Annotated[str, Meta(description='To be used as a dataset parameter value')],
+        UnsetType,
+    ] = UNSET
+    apiVersionNumber: Union[
+        Annotated[str, Meta(description='To be used as a version parameter value')],
+        UnsetType,
+    ] = UNSET
+    apiUrl: Union[
+        Annotated[str, Meta(description="The URL describing the dataset's fields")],
+        UnsetType,
+    ] = UNSET
+    apiDocumentationUrl: Union[
+        Annotated[str, Meta(description='A URL to the API console for each API')],
+        UnsetType,
+    ] = UNSET
+
+
+Apis: TypeAlias = Optional[List[Api]]
+
+
+class EmailItem(Struct):
+    author: str
+    address: Annotated[str, Meta(description='email address')]
+    description: Union[str, UnsetType] = 'empty'
+    tag: Union[str, UnsetType] = UNSET
+
+
+Email: TypeAlias = List[EmailItem]
+
+
+Id: TypeAlias = int
+
+
+Description: TypeAlias = Annotated[Optional[str], 'example']
+
+
+Name: TypeAlias = Optional[str]
+
+
+Tag: TypeAlias = str
+
+
+class Notes(Struct):
+    comments: Union[List[str], UnsetType] = field(default_factory=list)
+
+
+class Options(Struct):
+    comments: List[str]
+    oneOfComments: List[Union[str, float]]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/msgspec_oneof_with_null.py 0.45.0-1/tests/data/expected/main/openapi/msgspec_oneof_with_null.py
--- 0.26.4-3/tests/data/expected/main/openapi/msgspec_oneof_with_null.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/msgspec_oneof_with_null.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,29 @@
+# generated by datamodel-codegen:
+#   filename:  msgspec_oneof_with_null.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, Union
+
+from msgspec import UNSET, Meta, Struct, UnsetType
+from typing_extensions import TypeAlias
+
+OptionalOneofWithNullAndConstraint: TypeAlias = Annotated[str, Meta(max_length=100)]
+
+
+class Model(Struct):
+    required_field: str
+    optional_oneof_with_null: Union[str, None, UnsetType] = UNSET
+    optional_anyof_with_null: Union[str, None, UnsetType] = UNSET
+    optional_field_not_nullable: Union[str, UnsetType] = UNSET
+    optional_oneof_with_null_and_constraint: Union[
+        OptionalOneofWithNullAndConstraint, None, UnsetType
+    ] = UNSET
+    optional_nullable_field: Union[str, UnsetType] = UNSET
+    optional_nullable_with_constraint: Union[
+        Annotated[str, Meta(max_length=50)], UnsetType
+    ] = UNSET
+    optional_nullable_with_min_length: Union[
+        Annotated[str, Meta(min_length=5)], UnsetType
+    ] = UNSET
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/msgspec_oneof_with_null_union_operator.py 0.45.0-1/tests/data/expected/main/openapi/msgspec_oneof_with_null_union_operator.py
--- 0.26.4-3/tests/data/expected/main/openapi/msgspec_oneof_with_null_union_operator.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/msgspec_oneof_with_null_union_operator.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,29 @@
+# generated by datamodel-codegen:
+#   filename:  msgspec_oneof_with_null.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated
+
+from msgspec import UNSET, Meta, Struct, UnsetType
+from typing_extensions import TypeAlias
+
+OptionalOneofWithNullAndConstraint: TypeAlias = Annotated[str, Meta(max_length=100)]
+
+
+class Model(Struct):
+    required_field: str
+    optional_oneof_with_null: str | None | UnsetType = UNSET
+    optional_anyof_with_null: str | None | UnsetType = UNSET
+    optional_field_not_nullable: str | UnsetType = UNSET
+    optional_oneof_with_null_and_constraint: (
+        OptionalOneofWithNullAndConstraint | None | UnsetType
+    ) = UNSET
+    optional_nullable_field: str | UnsetType = UNSET
+    optional_nullable_with_constraint: (
+        Annotated[str, Meta(max_length=50)] | UnsetType
+    ) = UNSET
+    optional_nullable_with_min_length: (
+        Annotated[str, Meta(min_length=5)] | UnsetType
+    ) = UNSET
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/msgspec_struct.py 0.45.0-1/tests/data/expected/main/openapi/msgspec_struct.py
--- 0.26.4-3/tests/data/expected/main/openapi/msgspec_struct.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/msgspec_struct.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, List, Union
+
+from msgspec import UNSET, Meta, Struct, UnsetType
+from typing_extensions import TypeAlias
+
+
+class Pet(Struct):
+    id: int
+    name: str
+    tag: Union[str, UnsetType] = UNSET
+
+
+Pets: TypeAlias = List[Pet]
+
+
+class User(Struct):
+    id: int
+    name: str
+    tag: Union[str, UnsetType] = UNSET
+
+
+Users: TypeAlias = List[User]
+
+
+Id: TypeAlias = str
+
+
+Rules: TypeAlias = List[str]
+
+
+class Error(Struct):
+    code: int
+    message: str
+
+
+class Api(Struct):
+    apiKey: Union[
+        Annotated[str, Meta(description='To be used as a dataset parameter value')],
+        UnsetType,
+    ] = UNSET
+    apiVersionNumber: Union[
+        Annotated[str, Meta(description='To be used as a version parameter value')],
+        UnsetType,
+    ] = UNSET
+    apiUrl: Union[
+        Annotated[str, Meta(description="The URL describing the dataset's fields")],
+        UnsetType,
+    ] = UNSET
+    apiDocumentationUrl: Union[
+        Annotated[str, Meta(description='A URL to the API console for each API')],
+        UnsetType,
+    ] = UNSET
+
+
+Apis: TypeAlias = List[Api]
+
+
+class Event(Struct):
+    name: Union[str, UnsetType] = UNSET
+
+
+class Result(Struct):
+    event: Union[Event, UnsetType] = UNSET
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/msgspec_struct_snake_case.py 0.45.0-1/tests/data/expected/main/openapi/msgspec_struct_snake_case.py
--- 0.26.4-3/tests/data/expected/main/openapi/msgspec_struct_snake_case.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/msgspec_struct_snake_case.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,70 @@
+# generated by datamodel-codegen:
+#   filename:  api_ordered_required_fields.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, List, Union
+
+from msgspec import UNSET, Meta, Struct, UnsetType, field
+from typing_extensions import TypeAlias
+
+
+class Pet(Struct):
+    id: int
+    name: str
+    before_tag: str = field(name='beforeTag')
+    tag: Union[str, UnsetType] = UNSET
+
+
+Pets: TypeAlias = List[Pet]
+
+
+class User(Struct):
+    id: int
+    name: str
+    tag: Union[str, UnsetType] = UNSET
+
+
+Users: TypeAlias = List[User]
+
+
+Id: TypeAlias = str
+
+
+Rules: TypeAlias = List[str]
+
+
+class Error(Struct):
+    code: int
+    message: str
+
+
+class Api(Struct):
+    api_key: Union[
+        Annotated[str, Meta(description='To be used as a dataset parameter value')],
+        UnsetType,
+    ] = field(name='apiKey', default=UNSET)
+    api_version_number: Union[
+        Annotated[str, Meta(description='To be used as a version parameter value')],
+        UnsetType,
+    ] = field(name='apiVersionNumber', default=UNSET)
+    api_url: Union[
+        Annotated[str, Meta(description="The URL describing the dataset's fields")],
+        UnsetType,
+    ] = field(name='apiUrl', default=UNSET)
+    api_documentation_url: Union[
+        Annotated[str, Meta(description='A URL to the API console for each API')],
+        UnsetType,
+    ] = field(name='apiDocumentationUrl', default=UNSET)
+
+
+Apis: TypeAlias = List[Api]
+
+
+class Event(Struct):
+    name: Union[str, UnsetType] = UNSET
+
+
+class Result(Struct):
+    event: Union[Event, UnsetType] = UNSET
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/msgspec_union_default_object.py 0.45.0-1/tests/data/expected/main/openapi/msgspec_union_default_object.py
--- 0.26.4-3/tests/data/expected/main/openapi/msgspec_union_default_object.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/msgspec_union_default_object.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+# generated by datamodel-codegen:
+#   filename:  union_default_object.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Dict, Union
+
+from msgspec import UNSET, Struct, UnsetType, convert, field
+
+
+class Interval(Struct):
+    start: Union[int, UnsetType] = UNSET
+    end: Union[int, UnsetType] = UNSET
+
+
+class Container(Struct):
+    interval_or_string: Union[Interval, str, UnsetType] = field(
+        default_factory=lambda: convert({'start': 2009, 'end': 2019}, type=Interval)
+    )
+    string_or_interval: Union[Interval, str, UnsetType] = 'some string value'
+    dict_or_interval: Union[Dict[str, str], Interval, UnsetType] = field(
+        default_factory=lambda: {'key': 'value'}
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/msgspec_use_annotated_with_field_constraints.py 0.45.0-1/tests/data/expected/main/openapi/msgspec_use_annotated_with_field_constraints.py
--- 0.26.4-3/tests/data/expected/main/openapi/msgspec_use_annotated_with_field_constraints.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/msgspec_use_annotated_with_field_constraints.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,92 @@
+# generated by datamodel-codegen:
+#   filename:  api_constrained.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, List, Union
+
+from msgspec import UNSET, Meta, Struct, UnsetType
+from typing_extensions import TypeAlias
+
+
+class Pet(Struct):
+    id: Annotated[int, Meta(ge=0, le=9223372036854775807)]
+    name: Annotated[str, Meta(max_length=256)]
+    tag: Union[Annotated[str, Meta(max_length=64)], UnsetType] = UNSET
+
+
+Pets: TypeAlias = List[Pet]
+
+
+UID: TypeAlias = Annotated[int, Meta(ge=0)]
+
+
+Phone: TypeAlias = Annotated[str, Meta(min_length=3)]
+
+
+FaxItem: TypeAlias = Annotated[str, Meta(min_length=3)]
+
+
+class User(Struct):
+    id: Annotated[int, Meta(ge=0)]
+    name: Annotated[str, Meta(max_length=256)]
+    uid: UID
+    tag: Union[Annotated[str, Meta(max_length=64)], UnsetType] = UNSET
+    phones: Union[List[Phone], UnsetType] = UNSET
+    fax: Union[List[FaxItem], UnsetType] = UNSET
+    height: Union[Annotated[Union[int, float], Meta(ge=1.0, le=300.0)], UnsetType] = (
+        UNSET
+    )
+    weight: Union[Annotated[Union[float, int], Meta(ge=1.0, le=1000.0)], UnsetType] = (
+        UNSET
+    )
+    age: Union[Annotated[int, Meta(gt=0, le=200)], UnsetType] = UNSET
+    rating: Union[Annotated[float, Meta(gt=0.0, le=5.0)], UnsetType] = UNSET
+
+
+Users: TypeAlias = List[User]
+
+
+Id: TypeAlias = str
+
+
+Rules: TypeAlias = List[str]
+
+
+class Error(Struct):
+    code: int
+    message: str
+
+
+class Api(Struct):
+    apiKey: Union[
+        Annotated[str, Meta(description='To be used as a dataset parameter value')],
+        UnsetType,
+    ] = UNSET
+    apiVersionNumber: Union[
+        Annotated[str, Meta(description='To be used as a version parameter value')],
+        UnsetType,
+    ] = UNSET
+    apiUrl: Union[
+        Annotated[
+            str,
+            Meta(description="The URL describing the dataset's fields", min_length=1),
+        ],
+        UnsetType,
+    ] = UNSET
+    apiDocumentationUrl: Union[
+        Annotated[str, Meta(description='A URL to the API console for each API')],
+        UnsetType,
+    ] = UNSET
+
+
+Apis: TypeAlias = List[Api]
+
+
+class Event(Struct):
+    name: Union[str, UnsetType] = UNSET
+
+
+class Result(Struct):
+    event: Union[Event, UnsetType] = UNSET
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/msgspec_use_union_operator.py 0.45.0-1/tests/data/expected/main/openapi/msgspec_use_union_operator.py
--- 0.26.4-3/tests/data/expected/main/openapi/msgspec_use_union_operator.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/msgspec_use_union_operator.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,81 @@
+# generated by datamodel-codegen:
+#   filename:  nullable.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, List, TypeAlias
+
+from msgspec import UNSET, Meta, Struct, UnsetType, field
+
+
+class Cursors(Struct):
+    prev: str
+    index: float
+    next: str | UnsetType = 'last'
+    tag: str | UnsetType = UNSET
+
+
+class TopLevel(Struct):
+    cursors: Cursors
+
+
+class Info(Struct):
+    name: str
+
+
+class User(Struct):
+    info: Info
+
+
+class Api(Struct):
+    apiKey: (
+        Annotated[str, Meta(description='To be used as a dataset parameter value')]
+        | UnsetType
+    ) = UNSET
+    apiVersionNumber: (
+        Annotated[str, Meta(description='To be used as a version parameter value')]
+        | UnsetType
+    ) = UNSET
+    apiUrl: (
+        Annotated[str, Meta(description="The URL describing the dataset's fields")]
+        | UnsetType
+    ) = UNSET
+    apiDocumentationUrl: (
+        Annotated[str, Meta(description='A URL to the API console for each API')]
+        | UnsetType
+    ) = UNSET
+
+
+Apis: TypeAlias = List[Api] | None
+
+
+class EmailItem(Struct):
+    author: str
+    address: Annotated[str, Meta(description='email address')]
+    description: str | UnsetType = 'empty'
+    tag: str | UnsetType = UNSET
+
+
+Email: TypeAlias = List[EmailItem]
+
+
+Id: TypeAlias = int
+
+
+Description: TypeAlias = Annotated[str | None, 'example']
+
+
+Name: TypeAlias = str | None
+
+
+Tag: TypeAlias = str
+
+
+class Notes(Struct):
+    comments: List[str] | UnsetType = field(default_factory=list)
+
+
+class Options(Struct):
+    comments: List[str]
+    oneOfComments: List[str | float]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/multiple_required_any_of.py 0.45.0-1/tests/data/expected/main/openapi/multiple_required_any_of.py
--- 0.26.4-3/tests/data/expected/main/openapi/multiple_required_any_of.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/multiple_required_any_of.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+# generated by datamodel-codegen:
+#   filename:  multiple_required_any_of.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from ipaddress import IPv4Address, IPv6Address
+from typing import Optional, Union
+
+from pydantic import BaseModel
+
+
+class Addr1(BaseModel):
+    ipv4Addr: IPv4Address
+    ipv6Addr: Optional[IPv6Address] = None
+
+
+class Addr2(BaseModel):
+    ipv4Addr: Optional[IPv4Address] = None
+    ipv6Addr: IPv6Address
+
+
+class Addr(BaseModel):
+    __root__: Union[Addr1, Addr2]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/namespace_subns_ref/__init__.py 0.45.0-1/tests/data/expected/main/openapi/namespace_subns_ref/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/namespace_subns_ref/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/namespace_subns_ref/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  namespace_subns_ref.json
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/namespace_subns_ref/ns/__init__.py 0.45.0-1/tests/data/expected/main/openapi/namespace_subns_ref/ns/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/namespace_subns_ref/ns/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/namespace_subns_ref/ns/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  namespace_subns_ref.json
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from . import subns
+
+
+class Wrapper(BaseModel):
+    item: Optional[subns.Item] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/namespace_subns_ref/ns/subns.py 0.45.0-1/tests/data/expected/main/openapi/namespace_subns_ref/ns/subns.py
--- 0.26.4-3/tests/data/expected/main/openapi/namespace_subns_ref/ns/subns.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/namespace_subns_ref/ns/subns.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  namespace_subns_ref.json
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Item(BaseModel):
+    name: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/nested_enum.py 0.45.0-1/tests/data/expected/main/openapi/nested_enum.py
--- 0.26.4-3/tests/data/expected/main/openapi/nested_enum.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/nested_enum.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,32 @@
+# generated by datamodel-codegen:
+#   filename:  nested_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+
+from pydantic import BaseModel
+
+
+class State(Enum):
+    field_1 = '1'
+    field_2 = '2'
+
+
+class NestedState1(Enum):
+    field_1 = '1'
+    field_2 = '2'
+
+
+class NestedState2(Enum):
+    field_1 = '1'
+    field_2 = '2'
+
+
+class Result1(BaseModel):
+    state: NestedState1
+
+
+class Result2(BaseModel):
+    state: NestedState2
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/nested_package_enum_default/io/__init__.py 0.45.0-1/tests/data/expected/main/openapi/nested_package_enum_default/io/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/nested_package_enum_default/io/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/nested_package_enum_default/io/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  nested_package_enum_default.json
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/nested_package_enum_default/io/example/__init__.py 0.45.0-1/tests/data/expected/main/openapi/nested_package_enum_default/io/example/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/nested_package_enum_default/io/example/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/nested_package_enum_default/io/example/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  nested_package_enum_default.json
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/nested_package_enum_default/io/example/api/__init__.py 0.45.0-1/tests/data/expected/main/openapi/nested_package_enum_default/io/example/api/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/nested_package_enum_default/io/example/api/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/nested_package_enum_default/io/example/api/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  nested_package_enum_default.json
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/nested_package_enum_default/io/example/api/v1.py 0.45.0-1/tests/data/expected/main/openapi/nested_package_enum_default/io/example/api/v1.py
--- 0.26.4-3/tests/data/expected/main/openapi/nested_package_enum_default/io/example/api/v1.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/nested_package_enum_default/io/example/api/v1.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,26 @@
+# generated by datamodel-codegen:
+#   filename:  nested_package_enum_default.json
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from enum import Enum
+from typing import Optional
+
+
+class Resolution(Enum):
+    Required = 'Required'
+    Optional = 'Optional'
+
+
+class Policy(Enum):
+    Allow = 'Allow'
+    Deny = 'Deny'
+
+
+@dataclass
+class BucketSpec:
+    resolution: Optional[Resolution] = Resolution.Required
+    policy: Optional[Policy] = Policy.Allow
+    name: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/no_file.py 0.45.0-1/tests/data/expected/main/openapi/no_file.py
--- 0.26.4-3/tests/data/expected/main/openapi/no_file.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/no_file.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/null_only_enum.py 0.45.0-1/tests/data/expected/main/openapi/null_only_enum.py
--- 0.26.4-3/tests/data/expected/main/openapi/null_only_enum.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/null_only_enum.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  null_only_enum.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class NullEnum(BaseModel):
+    __root__: None = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/nullable.py 0.45.0-1/tests/data/expected/main/openapi/nullable.py
--- 0.26.4-3/tests/data/expected/main/openapi/nullable.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/nullable.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,83 @@
+# generated by datamodel-codegen:
+#   filename:  nullable.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional, Union
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Cursors(BaseModel):
+    prev: str
+    next: Optional[str] = 'last'
+    index: float
+    tag: Optional[str] = None
+
+
+class TopLevel(BaseModel):
+    cursors: Cursors
+
+
+class Info(BaseModel):
+    name: str
+
+
+class User(BaseModel):
+    info: Info
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: Optional[List[Api]] = None
+
+
+class EmailItem(BaseModel):
+    author: str
+    address: str = Field(..., description='email address')
+    description: Optional[str] = 'empty'
+    tag: Optional[str] = None
+
+
+class Email(BaseModel):
+    __root__: List[EmailItem]
+
+
+class Id(BaseModel):
+    __root__: int
+
+
+class Description(BaseModel):
+    __root__: Optional[str] = 'example'
+
+
+class Name(BaseModel):
+    __root__: Optional[str] = None
+
+
+class Tag(BaseModel):
+    __root__: str
+
+
+class Notes(BaseModel):
+    comments: List[str] = Field(default_factory=list)
+
+
+class Options(BaseModel):
+    comments: List[str]
+    oneOfComments: List[Union[str, float]]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/nullable_31.py 0.45.0-1/tests/data/expected/main/openapi/nullable_31.py
--- 0.26.4-3/tests/data/expected/main/openapi/nullable_31.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/nullable_31.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  nullable_31.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List
+
+from pydantic import BaseModel
+
+
+class Apple(BaseModel):
+    pass
+
+
+class Basket(BaseModel):
+    apples: List[Apple] | None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/nullable_strict_nullable.py 0.45.0-1/tests/data/expected/main/openapi/nullable_strict_nullable.py
--- 0.26.4-3/tests/data/expected/main/openapi/nullable_strict_nullable.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/nullable_strict_nullable.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,83 @@
+# generated by datamodel-codegen:
+#   filename:  nullable.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional, Union
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Cursors(BaseModel):
+    prev: Optional[str] = Field(...)
+    next: str = 'last'
+    index: float
+    tag: Optional[str] = None
+
+
+class TopLevel(BaseModel):
+    cursors: Cursors
+
+
+class Info(BaseModel):
+    name: str
+
+
+class User(BaseModel):
+    info: Info
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: Optional[List[Api]] = Field(...)
+
+
+class EmailItem(BaseModel):
+    author: str
+    address: str = Field(..., description='email address')
+    description: str = 'empty'
+    tag: Optional[str] = None
+
+
+class Email(BaseModel):
+    __root__: List[EmailItem]
+
+
+class Id(BaseModel):
+    __root__: int
+
+
+class Description(BaseModel):
+    __root__: Optional[str] = 'example'
+
+
+class Name(BaseModel):
+    __root__: Optional[str] = None
+
+
+class Tag(BaseModel):
+    __root__: str
+
+
+class Notes(BaseModel):
+    comments: List[str] = Field(default_factory=list)
+
+
+class Options(BaseModel):
+    comments: List[Optional[str]]
+    oneOfComments: List[Union[Optional[str], Optional[float]]]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/nullable_strict_nullable_use_union_operator.py 0.45.0-1/tests/data/expected/main/openapi/nullable_strict_nullable_use_union_operator.py
--- 0.26.4-3/tests/data/expected/main/openapi/nullable_strict_nullable_use_union_operator.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/nullable_strict_nullable_use_union_operator.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,83 @@
+# generated by datamodel-codegen:
+#   filename:  nullable.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Cursors(BaseModel):
+    prev: str | None = Field(...)
+    next: str = 'last'
+    index: float
+    tag: str | None = None
+
+
+class TopLevel(BaseModel):
+    cursors: Cursors
+
+
+class Info(BaseModel):
+    name: str
+
+
+class User(BaseModel):
+    info: Info
+
+
+class Api(BaseModel):
+    apiKey: str | None = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: str | None = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: AnyUrl | None = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: AnyUrl | None = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api] | None = Field(...)
+
+
+class EmailItem(BaseModel):
+    author: str
+    address: str = Field(..., description='email address')
+    description: str = 'empty'
+    tag: str | None = None
+
+
+class Email(BaseModel):
+    __root__: List[EmailItem]
+
+
+class Id(BaseModel):
+    __root__: int
+
+
+class Description(BaseModel):
+    __root__: str | None = 'example'
+
+
+class Name(BaseModel):
+    __root__: str | None = None
+
+
+class Tag(BaseModel):
+    __root__: str
+
+
+class Notes(BaseModel):
+    comments: List[str] = Field(default_factory=list)
+
+
+class Options(BaseModel):
+    comments: List[str | None]
+    oneOfComments: List[str | float | None]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/oas_response_reference.py 0.45.0-1/tests/data/expected/main/openapi/oas_response_reference.py
--- 0.26.4-3/tests/data/expected/main/openapi/oas_response_reference.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/oas_response_reference.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+# generated by datamodel-codegen:
+#   filename:  oas_response_reference.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class PetsGetResponse(BaseModel):
+    __root__: List[Pet]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/openapi_non_operations_and_security.py 0.45.0-1/tests/data/expected/main/openapi/openapi_non_operations_and_security.py
--- 0.26.4-3/tests/data/expected/main/openapi/openapi_non_operations_and_security.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/openapi_non_operations_and_security.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  non_operations_and_security.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    id: Optional[int] = None
+    name: Optional[str] = None
+
+
+class PetsGetResponse(BaseModel):
+    __root__: List[Pet]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/openapi_webhooks.py 0.45.0-1/tests/data/expected/main/openapi/openapi_webhooks.py
--- 0.26.4-3/tests/data/expected/main/openapi/openapi_webhooks.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/openapi_webhooks.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  webhooks.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class PetUpdate(BaseModel):
+    id: Optional[int] = None
+    name: Optional[str] = None
+    tag: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/openapi_webhooks_with_parameters.py 0.45.0-1/tests/data/expected/main/openapi/openapi_webhooks_with_parameters.py
--- 0.26.4-3/tests/data/expected/main/openapi/openapi_webhooks_with_parameters.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/openapi_webhooks_with_parameters.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,23 @@
+# generated by datamodel-codegen:
+#   filename:  webhooks_with_parameters.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+
+
+class PetNewPostParametersQuery(BaseModel):
+    X_Request_Id: Optional[str] = Field(None, alias='X-Request-Id')
+    X_Webhook_Id: str = Field(..., alias='X-Webhook-Id')
+
+
+class PetUpdatedPostParametersQuery(BaseModel):
+    X_Correlation_Id: str = Field(..., alias='X-Correlation-Id')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/override_required_all_of.py 0.45.0-1/tests/data/expected/main/openapi/override_required_all_of.py
--- 0.26.4-3/tests/data/expected/main/openapi/override_required_all_of.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/override_required_all_of.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  override_required_all_of.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Type(Enum):
+    my_first_object = 'my_first_object'
+    my_second_object = 'my_second_object'
+
+
+class ObjectBase(BaseModel):
+    name: Optional[str] = Field(None, description='Name of the object')
+    type: Optional[Type] = Field(None, description='Object type')
+    rank: Optional[Union[int, float]] = Field(None, description='User rank')
+    allIn: Optional[Union[Type, str, Union[int, float]]] = None
+
+
+class CreateObjectRequest(ObjectBase):
+    name: str = Field(..., description='Name of the object')
+    type: Type = Field(..., description='Object type')
+    rank: Union[int, float] = Field(..., description='User rank')
+    allIn: Union[Type, str, Union[int, float]]
+
+
+class UpdateObjectRequest(ObjectBase):
+    pass
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/paths_external_ref.py 0.45.0-1/tests/data/expected/main/openapi/paths_external_ref.py
--- 0.26.4-3/tests/data/expected/main/openapi/paths_external_ref.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/paths_external_ref.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  openapi.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Cat(BaseModel):
+    name: Optional[str] = None
+    breed: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/paths_ref_with_external_schema.py 0.45.0-1/tests/data/expected/main/openapi/paths_ref_with_external_schema.py
--- 0.26.4-3/tests/data/expected/main/openapi/paths_ref_with_external_schema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/paths_ref_with_external_schema.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+# generated by datamodel-codegen:
+#   filename:  openapi.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/pattern/general.py 0.45.0-1/tests/data/expected/main/openapi/pattern/general.py
--- 0.26.4-3/tests/data/expected/main/openapi/pattern/general.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/pattern/general.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,22 @@
+# generated by datamodel-codegen:
+#   filename:  pattern.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, constr
+
+
+class Info(BaseModel):
+    hostName: Optional[
+        constr(
+            regex=r'^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]{0,61}[A-Za-z0-9])\Z'
+        )
+    ] = None
+    arn: Optional[
+        constr(regex=r'(^arn:([^:]*):([^:]*):([^:]*):(|\*|[\d]{12}):(.+)$)|^\*$')
+    ] = None
+    tel: Optional[constr(regex=r'^(\([0-9]{3}\))?[0-9]{3}-[0-9]{4}$')] = None
+    comment: Optional[constr(regex=r'[^\b\f\n\r\t\\a+.?\'"|()]+$')] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/pattern/msgspec_pattern.py 0.45.0-1/tests/data/expected/main/openapi/pattern/msgspec_pattern.py
--- 0.26.4-3/tests/data/expected/main/openapi/pattern/msgspec_pattern.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/pattern/msgspec_pattern.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,26 @@
+# generated by datamodel-codegen:
+#   filename:  pattern.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, Union
+
+from msgspec import UNSET, Meta, Struct, UnsetType
+
+
+class Info(Struct):
+    hostName: Union[str, UnsetType] = UNSET
+    arn: Union[
+        Annotated[
+            str,
+            Meta(pattern='(^arn:([^:]*):([^:]*):([^:]*):(|\\*|[\\d]{12}):(.+)$)|^\\*$'),
+        ],
+        UnsetType,
+    ] = UNSET
+    tel: Union[
+        Annotated[str, Meta(pattern='^(\\([0-9]{3}\\))?[0-9]{3}-[0-9]{4}$')], UnsetType
+    ] = UNSET
+    comment: Union[
+        Annotated[str, Meta(pattern='[^\\b\\f\\n\\r\\t\\\\a+.?\'"|()]+$')], UnsetType
+    ] = UNSET
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/pattern/pydantic_v2.py 0.45.0-1/tests/data/expected/main/openapi/pattern/pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/pattern/pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/pattern/pydantic_v2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,22 @@
+# generated by datamodel-codegen:
+#   filename:  pattern.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, constr
+
+
+class Info(BaseModel):
+    hostName: Optional[
+        constr(
+            pattern=r'^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]{0,61}[A-Za-z0-9])$'
+        )
+    ] = None
+    arn: Optional[
+        constr(pattern=r'(^arn:([^:]*):([^:]*):([^:]*):(|\*|[\d]{12}):(.+)$)|^\*$')
+    ] = None
+    tel: Optional[constr(pattern=r'^(\([0-9]{3}\))?[0-9]{3}-[0-9]{4}$')] = None
+    comment: Optional[constr(pattern=r'[^\b\f\n\r\t\\a+.?\'"|()]+$')] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/pattern_with_lookaround_pydantic_v2.py 0.45.0-1/tests/data/expected/main/openapi/pattern_with_lookaround_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/pattern_with_lookaround_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/pattern_with_lookaround_pydantic_v2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  pattern_lookaround.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict, constr
+
+
+class Info(BaseModel):
+    model_config = ConfigDict(
+        regex_engine="python-re",
+    )
+    name: Optional[constr(pattern=r'.*foo.*(?<!baz)bar.*')] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/pattern_with_lookaround_pydantic_v2_field_constraints.py 0.45.0-1/tests/data/expected/main/openapi/pattern_with_lookaround_pydantic_v2_field_constraints.py
--- 0.26.4-3/tests/data/expected/main/openapi/pattern_with_lookaround_pydantic_v2_field_constraints.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/pattern_with_lookaround_pydantic_v2_field_constraints.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  pattern_lookaround.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict, Field
+
+
+class Info(BaseModel):
+    model_config = ConfigDict(
+        regex_engine="python-re",
+    )
+    name: Optional[str] = Field(None, pattern='.*foo.*(?<!baz)bar.*')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/pydantic_v2.py 0.45.0-1/tests/data/expected/main/openapi/pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/pydantic_v2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field, RootModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(RootModel[List[Pet]]):
+    root: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(RootModel[List[User]]):
+    root: List[User]
+
+
+class Id(RootModel[str]):
+    root: str
+
+
+class Rules(RootModel[List[str]]):
+    root: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(RootModel[List[Api]]):
+    root: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/pydantic_v2_default_object/Another.py 0.45.0-1/tests/data/expected/main/openapi/pydantic_v2_default_object/Another.py
--- 0.26.4-3/tests/data/expected/main/openapi/pydantic_v2_default_object/Another.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/pydantic_v2_default_object/Another.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  default_object.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field, RootModel
+
+from . import Foo as Foo_1
+from . import Nested
+
+
+class Foo(RootModel[str]):
+    root: str
+
+
+class Bar(BaseModel):
+    original_foo: Optional[Foo_1] = Field(
+        default_factory=lambda: Foo_1.model_validate({'text': 'abc', 'number': 123})
+    )
+    nested_foo: Optional[List[Nested.Foo]] = Field(
+        default_factory=lambda: [Nested.Foo.model_validate(v) for v in ['abc', 'efg']]
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/pydantic_v2_default_object/Nested.py 0.45.0-1/tests/data/expected/main/openapi/pydantic_v2_default_object/Nested.py
--- 0.26.4-3/tests/data/expected/main/openapi/pydantic_v2_default_object/Nested.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/pydantic_v2_default_object/Nested.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,30 @@
+# generated by datamodel-codegen:
+#   filename:  default_object.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field, RootModel
+
+from . import Foo as Foo_1
+
+
+class Foo(RootModel[str]):
+    root: str
+
+
+class Bar(BaseModel):
+    foo: Optional[Foo_1] = Field(
+        default_factory=lambda: Foo_1.model_validate({'text': 'abc', 'number': 123})
+    )
+    baz: Optional[List[Foo_1]] = Field(
+        default_factory=lambda: [
+            Foo_1.model_validate(v)
+            for v in [{'text': 'abc', 'number': 123}, {'text': 'efg', 'number': 456}]
+        ]
+    )
+    nested_foo: Optional[Foo] = Field(
+        default_factory=lambda: Foo.model_validate('default foo')
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/pydantic_v2_default_object/__init__.py 0.45.0-1/tests/data/expected/main/openapi/pydantic_v2_default_object/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/pydantic_v2_default_object/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/pydantic_v2_default_object/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,26 @@
+# generated by datamodel-codegen:
+#   filename:  default_object.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class Foo(BaseModel):
+    text: Optional[str] = '987'
+    number: Optional[float] = None
+
+
+class Bar(BaseModel):
+    foo: Optional[Foo] = Field(
+        default_factory=lambda: Foo.model_validate({'text': 'abc', 'number': 123})
+    )
+    baz: Optional[List[Foo]] = Field(
+        default_factory=lambda: [
+            Foo.model_validate(v)
+            for v in [{'text': 'abc', 'number': 123}, {'text': 'efg', 'number': 456}]
+        ]
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/pydantic_v2_empty_dict_default.py 0.45.0-1/tests/data/expected/main/openapi/pydantic_v2_empty_dict_default.py
--- 0.26.4-3/tests/data/expected/main/openapi/pydantic_v2_empty_dict_default.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/pydantic_v2_empty_dict_default.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  empty_dict_default.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class ObjectMeta(BaseModel):
+    name: Optional[str] = None
+    namespace: Optional[str] = None
+
+
+class PodSpec(BaseModel):
+    metadata: Optional[ObjectMeta] = Field(
+        default_factory=lambda: ObjectMeta.model_validate({})
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/pydantic_v2_empty_list_default.py 0.45.0-1/tests/data/expected/main/openapi/pydantic_v2_empty_list_default.py
--- 0.26.4-3/tests/data/expected/main/openapi/pydantic_v2_empty_list_default.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/pydantic_v2_empty_list_default.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  empty_list_default.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class Container(BaseModel):
+    name: Optional[str] = None
+
+
+class PodSpec(BaseModel):
+    containers: Optional[List[Container]] = Field(default_factory=list)
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/pydantic_v2_union_default_object.py 0.45.0-1/tests/data/expected/main/openapi/pydantic_v2_union_default_object.py
--- 0.26.4-3/tests/data/expected/main/openapi/pydantic_v2_union_default_object.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/pydantic_v2_union_default_object.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,22 @@
+# generated by datamodel-codegen:
+#   filename:  union_default_object.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Dict, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Interval(BaseModel):
+    start: Optional[int] = None
+    end: Optional[int] = None
+
+
+class Container(BaseModel):
+    interval_or_string: Optional[Union[Interval, str]] = Field(
+        default_factory=lambda: Interval.model_validate({'start': 2009, 'end': 2019})
+    )
+    string_or_interval: Optional[Union[Interval, str]] = 'some string value'
+    dict_or_interval: Optional[Union[Dict[str, str], Interval]] = {'key': 'value'}
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/pyproject.py 0.45.0-1/tests/data/expected/main/openapi/pyproject.py
--- 0.26.4-3/tests/data/expected/main/openapi/pyproject.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/pyproject.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,94 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import (
+    annotations,
+)
+
+from typing import (
+    List,
+    Optional,
+)
+
+from pydantic import (
+    AnyUrl,
+    BaseModel,
+    Field,
+)
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str]
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str]
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    api_key: Optional[
+        str
+    ] = Field(
+        None,
+        alias="apiKey",
+        description="To be used as a dataset parameter value",
+    )
+    api_version_number: Optional[
+        str
+    ] = Field(
+        None,
+        alias="apiVersionNumber",
+        description="To be used as a version parameter value",
+    )
+    api_url: Optional[
+        AnyUrl
+    ] = Field(
+        None,
+        alias="apiUrl",
+        description="The URL describing the dataset's fields",
+    )
+    api_documentation_url: Optional[
+        AnyUrl
+    ] = Field(
+        None,
+        alias="apiDocumentationUrl",
+        description="A URL to the API console for each API",
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str]
+
+
+class Result(BaseModel):
+    event: Optional[Event]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/pyproject_not_found.py 0.45.0-1/tests/data/expected/main/openapi/pyproject_not_found.py
--- 0.26.4-3/tests/data/expected/main/openapi/pyproject_not_found.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/pyproject_not_found.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_all.py 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_all.py
--- 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_all.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_all.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,29 @@
+# generated by datamodel-codegen:
+#   filename:  read_only_write_only.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import AwareDatetime, BaseModel
+
+
+class UserRequest(BaseModel):
+    name: str
+    password: str
+    secret_token: Optional[str] = None
+
+
+class UserResponse(BaseModel):
+    id: int
+    name: str
+    created_at: Optional[AwareDatetime] = None
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    password: str
+    created_at: Optional[AwareDatetime] = None
+    secret_token: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_allof.py 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_allof.py
--- 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_allof.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_allof.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,40 @@
+# generated by datamodel-codegen:
+#   filename:  read_only_write_only_allof.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import AwareDatetime, BaseModel
+
+
+class Timestamps(BaseModel):
+    created_at: Optional[AwareDatetime] = None
+    updated_at: Optional[AwareDatetime] = None
+
+
+class Credentials(BaseModel):
+    password: Optional[str] = None
+    api_key: Optional[str] = None
+
+
+class UserRequest(BaseModel):
+    password: Optional[str] = None
+    api_key: Optional[str] = None
+    name: str
+    email: Optional[str] = None
+
+
+class UserResponse(BaseModel):
+    created_at: Optional[AwareDatetime] = None
+    updated_at: Optional[AwareDatetime] = None
+    id: int
+    name: str
+    email: Optional[str] = None
+
+
+class User(Timestamps, Credentials):
+    id: int
+    name: str
+    email: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_allof_order.py 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_allof_order.py
--- 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_allof_order.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_allof_order.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,41 @@
+# generated by datamodel-codegen:
+#   filename:  read_only_write_only_allof_order.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import AwareDatetime, BaseModel
+
+
+class ChildRequest(BaseModel):
+    id: int
+    secret: Optional[str] = None
+    child_field: Optional[str] = None
+
+
+class ChildResponse(BaseModel):
+    id: int
+    created_at: Optional[AwareDatetime] = None
+    child_field: Optional[str] = None
+
+
+class ParentRequest(BaseModel):
+    id: int
+    secret: Optional[str] = None
+
+
+class ParentResponse(BaseModel):
+    id: int
+    created_at: Optional[AwareDatetime] = None
+
+
+class Parent(BaseModel):
+    id: int
+    created_at: Optional[AwareDatetime] = None
+    secret: Optional[str] = None
+
+
+class Child(Parent):
+    child_field: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_allof_request_response.py 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_allof_request_response.py
--- 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_allof_request_response.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_allof_request_response.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+# generated by datamodel-codegen:
+#   filename:  read_only_write_only_allof.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import AwareDatetime, BaseModel
+
+
+class UserRequest(BaseModel):
+    password: Optional[str] = None
+    api_key: Optional[str] = None
+    name: str
+    email: Optional[str] = None
+
+
+class UserResponse(BaseModel):
+    created_at: Optional[AwareDatetime] = None
+    updated_at: Optional[AwareDatetime] = None
+    id: int
+    name: str
+    email: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_allof_required_only.py 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_allof_required_only.py
--- 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_allof_required_only.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_allof_required_only.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,27 @@
+# generated by datamodel-codegen:
+#   filename:  read_only_write_only_allof_required_only.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import AwareDatetime, BaseModel
+
+
+class ChildRequest(BaseModel):
+    id: Optional[int] = None
+    child_field: Optional[str] = None
+
+
+class ParentRequest(BaseModel):
+    id: int
+
+
+class Parent(BaseModel):
+    id: int
+    created_at: Optional[AwareDatetime] = None
+
+
+class Child(Parent):
+    child_field: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_allof_url_ref.py 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_allof_url_ref.py
--- 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_allof_url_ref.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_allof_url_ref.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,41 @@
+# generated by datamodel-codegen:
+#   filename:  read_only_write_only_allof_url_ref.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import AwareDatetime, BaseModel
+
+
+class ChildRequest(BaseModel):
+    id: int
+    password: Optional[str] = None
+    child_field: Optional[str] = None
+
+
+class ChildResponse(BaseModel):
+    id: int
+    created_at: Optional[AwareDatetime] = None
+    child_field: Optional[str] = None
+
+
+class BaseObjectRequest(BaseModel):
+    id: int
+    password: Optional[str] = None
+
+
+class BaseObjectResponse(BaseModel):
+    id: int
+    created_at: Optional[AwareDatetime] = None
+
+
+class BaseObject(BaseModel):
+    id: int
+    created_at: Optional[AwareDatetime] = None
+    password: Optional[str] = None
+
+
+class Child(BaseObject):
+    child_field: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_anyof.py 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_anyof.py
--- 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_anyof.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_anyof.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  read_only_write_only_anyof.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Union
+
+from pydantic import BaseModel
+
+
+class PetRequest(BaseModel):
+    id: int
+    token: Optional[Union[str, int]] = None
+
+
+class PetResponse(BaseModel):
+    id: int
+    status: Optional[Union[str, int]] = None
+
+
+class Pet(BaseModel):
+    id: int
+    status: Optional[Union[str, int]] = None
+    token: Optional[Union[str, int]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_collision.py 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_collision.py
--- 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_collision.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_collision.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,29 @@
+# generated by datamodel-codegen:
+#   filename:  read_only_write_only_collision.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class UserRequest(BaseModel):
+    existing_field: Optional[str] = None
+
+
+class UserRequest1(BaseModel):
+    name: str
+    password: Optional[str] = None
+
+
+class UserResponse(BaseModel):
+    id: int
+    name: str
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    password: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_default.py 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_default.py
--- 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_default.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_default.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  read_only_write_only.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import AwareDatetime, BaseModel
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    password: str
+    created_at: Optional[AwareDatetime] = None
+    secret_token: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_double_collision.py 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_double_collision.py
--- 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_double_collision.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_double_collision.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  read_only_write_only_double_collision.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class UserRequest(BaseModel):
+    existing_field: Optional[str] = None
+
+
+class UserRequestModel(BaseModel):
+    another_field: Optional[str] = None
+
+
+class UserRequest1(BaseModel):
+    name: str
+    password: Optional[str] = None
+
+
+class UserResponse(BaseModel):
+    id: int
+    name: str
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    password: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_duplicate_allof_ref.py 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_duplicate_allof_ref.py
--- 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_duplicate_allof_ref.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_duplicate_allof_ref.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,43 @@
+# generated by datamodel-codegen:
+#   filename:  read_only_write_only_duplicate_allof_ref.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class BaseRequest(BaseModel):
+    name: Optional[str] = None
+
+
+class Base(BaseModel):
+    id: Optional[int] = None
+    name: Optional[str] = None
+
+
+class ParentRequest(BaseModel):
+    name: Optional[str] = None
+    parent_field: Optional[str] = None
+
+
+class Parent(Base):
+    parent_field: Optional[str] = None
+
+
+class ChildRequest(BaseModel):
+    name: Optional[str] = None
+    parent_field: Optional[str] = None
+    extra: Optional[str] = None
+
+
+class ChildResponse(BaseModel):
+    id: Optional[int] = None
+    name: Optional[str] = None
+    parent_field: Optional[str] = None
+
+
+class Child(Parent):
+    extra: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_empty_base.py 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_empty_base.py
--- 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_empty_base.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_empty_base.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  read_only_write_only_empty_base.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class EmptyBase(BaseModel):
+    pass
+
+
+class HasFieldBase(BaseModel):
+    base_field: Optional[str] = None
+
+
+class ChildRequest(BaseModel):
+    base_field: Optional[str] = None
+
+
+class Child(EmptyBase, HasFieldBase):
+    id: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_mixed.py 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_mixed.py
--- 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_mixed.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_mixed.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,28 @@
+# generated by datamodel-codegen:
+#   filename:  read_only_write_only_mixed.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class UserRequest(BaseModel):
+    name: str
+    password: Optional[str] = None
+
+
+class UserResponse(BaseModel):
+    id: Optional[int] = None
+    name: str
+
+
+class Address(BaseModel):
+    street: Optional[str] = None
+    city: str
+
+
+class ProductRequest(BaseModel):
+    name: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_nested_allof.py 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_nested_allof.py
--- 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_nested_allof.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_nested_allof.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  read_only_write_only_nested_allof.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import AwareDatetime, BaseModel
+
+
+class BaseTimestamps(BaseModel):
+    created_at: Optional[AwareDatetime] = None
+
+
+class ExtendedTimestamps(BaseTimestamps):
+    updated_at: Optional[AwareDatetime] = None
+
+
+class UserRequest(BaseModel):
+    name: str
+    password: Optional[str] = None
+
+
+class UserResponse(BaseModel):
+    created_at: Optional[AwareDatetime] = None
+    updated_at: Optional[AwareDatetime] = None
+    name: str
+
+
+class User(ExtendedTimestamps):
+    name: str
+    password: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_nested_allof_order.py 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_nested_allof_order.py
--- 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_nested_allof_order.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_nested_allof_order.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,48 @@
+# generated by datamodel-codegen:
+#   filename:  read_only_write_only_nested_allof_order.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import AwareDatetime, BaseModel
+
+
+class ChildRequest(BaseModel):
+    id: int
+    parent_field: Optional[str] = None
+    child_field: Optional[str] = None
+
+
+class ChildResponse(BaseModel):
+    id: int
+    created_at: Optional[AwareDatetime] = None
+    child_field: Optional[str] = None
+
+
+class ParentRequest(BaseModel):
+    id: int
+    parent_field: Optional[str] = None
+
+
+class ParentResponse(BaseModel):
+    id: int
+    created_at: Optional[AwareDatetime] = None
+
+
+class GrandParentRequest(BaseModel):
+    id: int
+
+
+class GrandParent(BaseModel):
+    id: int
+    created_at: Optional[AwareDatetime] = None
+
+
+class Parent(GrandParent):
+    parent_field: Optional[str] = None
+
+
+class Child(Parent):
+    child_field: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_ref.py 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_ref.py
--- 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_ref.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_ref.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  read_only_write_only_ref.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, RootModel
+
+
+class ReadOnlyId(RootModel[int]):
+    root: int
+
+
+class WriteOnlySecret(RootModel[str]):
+    root: str
+
+
+class UserRequest(BaseModel):
+    name: str
+    secret: Optional[WriteOnlySecret] = None
+
+
+class UserResponse(BaseModel):
+    id: ReadOnlyId
+    name: str
+
+
+class User(BaseModel):
+    id: ReadOnlyId
+    name: str
+    secret: Optional[WriteOnlySecret] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_ref_with_desc.py 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_ref_with_desc.py
--- 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_ref_with_desc.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_ref_with_desc.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,34 @@
+# generated by datamodel-codegen:
+#   filename:  read_only_write_only_ref_with_desc.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class AddressRequest(BaseModel):
+    street: Optional[str] = None
+
+
+class Address(BaseModel):
+    street: Optional[str] = None
+    city: Optional[str] = None
+
+
+class Base(BaseModel):
+    base_id: Optional[int] = None
+
+
+class UserRequest(BaseModel):
+    base_id: Optional[int] = None
+    name: str
+    home_address: Optional[Address] = Field(None, description="User's home address")
+
+
+class User(Base):
+    name: str
+    home_address: Optional[Address] = Field(None, description="User's home address")
+    work_address: Optional[Address] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_request_response.py 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_request_response.py
--- 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_request_response.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_request_response.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  read_only_write_only.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import AwareDatetime, BaseModel
+
+
+class UserRequest(BaseModel):
+    name: str
+    password: str
+    secret_token: Optional[str] = None
+
+
+class UserResponse(BaseModel):
+    id: int
+    name: str
+    created_at: Optional[AwareDatetime] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_shared_base_ref.py 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_shared_base_ref.py
--- 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_shared_base_ref.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_shared_base_ref.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,59 @@
+# generated by datamodel-codegen:
+#   filename:  read_only_write_only_shared_base_ref.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class SharedBaseRequest(BaseModel):
+    shared_name: Optional[str] = None
+
+
+class SharedBase(BaseModel):
+    shared_id: Optional[int] = None
+    shared_name: Optional[str] = None
+
+
+class Parent1Request(BaseModel):
+    shared_name: Optional[str] = None
+    parent1_field: Optional[str] = None
+
+
+class Parent1(SharedBase):
+    parent1_field: Optional[str] = None
+
+
+class Parent2Request(BaseModel):
+    shared_name: Optional[str] = None
+    parent2_field: Optional[str] = None
+
+
+class Parent2Response(BaseModel):
+    shared_id: Optional[int] = None
+    shared_name: Optional[str] = None
+
+
+class Parent2(SharedBase):
+    parent2_field: Optional[str] = None
+
+
+class ChildRequest(BaseModel):
+    shared_name: Optional[str] = None
+    parent1_field: Optional[str] = None
+    parent2_field: Optional[str] = None
+    child_field: Optional[str] = None
+
+
+class ChildResponse(BaseModel):
+    shared_id: Optional[int] = None
+    shared_name: Optional[str] = None
+    parent1_field: Optional[str] = None
+    child_field: Optional[str] = None
+
+
+class Child(Parent1, Parent2):
+    child_field: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_union.py 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_union.py
--- 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_union.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_union.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  read_only_write_only_union.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Union
+
+from pydantic import BaseModel
+
+
+class UserRequest(BaseModel):
+    name: str
+    metadata: Optional[Union[str, int]] = None
+
+
+class UserResponse(BaseModel):
+    id: int
+    name: str
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    metadata: Optional[Union[str, int]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_url_ref.py 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_url_ref.py
--- 0.26.4-3/tests/data/expected/main/openapi/read_only_write_only_url_ref.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/read_only_write_only_url_ref.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  read_only_write_only_url_ref.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import AwareDatetime, BaseModel, RootModel
+
+
+class ReadOnlyTimestamp(RootModel[AwareDatetime]):
+    root: AwareDatetime
+
+
+class WriteOnlySecret(RootModel[str]):
+    root: str
+
+
+class UserRequest(BaseModel):
+    name: str
+    secret: Optional[WriteOnlySecret] = None
+
+
+class UserResponse(BaseModel):
+    name: str
+    created_at: Optional[ReadOnlyTimestamp] = None
+
+
+class User(BaseModel):
+    name: str
+    created_at: Optional[ReadOnlyTimestamp] = None
+    secret: Optional[WriteOnlySecret] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/ref_nullable_strict_nullable.py 0.45.0-1/tests/data/expected/main/openapi/ref_nullable_strict_nullable.py
--- 0.26.4-3/tests/data/expected/main/openapi/ref_nullable_strict_nullable.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/ref_nullable_strict_nullable.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  ref_nullable.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class NullableChild(BaseModel):
+    name: str | None = None
+
+
+class NonNullableChild(BaseModel):
+    name: str | None = None
+
+
+class Parent(BaseModel):
+    nullableChild: NullableChild | None
+    nonNullableChild: NonNullableChild
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/reference_same_hierarchy_directory.py 0.45.0-1/tests/data/expected/main/openapi/reference_same_hierarchy_directory.py
--- 0.26.4-3/tests/data/expected/main/openapi/reference_same_hierarchy_directory.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/reference_same_hierarchy_directory.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  entities.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class CatDetails(BaseModel):
+    name: str = Field(..., description='Name of this cat')
+    birthYear: float = Field(..., description="Year of this cat's birth")
+
+
+class CatInfo(BaseModel):
+    cat_id: str = Field(..., description='ID of this cat')
+    details: Optional[CatDetails] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/reference_to_object_properties.py 0.45.0-1/tests/data/expected/main/openapi/reference_to_object_properties.py
--- 0.26.4-3/tests/data/expected/main/openapi/reference_to_object_properties.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/reference_to_object_properties.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,31 @@
+# generated by datamodel-codegen:
+#   filename:  reference_to_object_properties.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Pet(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
+
+
+class Parent(BaseModel):
+    id: Optional[Id] = None
+    name: Optional[str] = None
+    pet: Optional[Pet] = None
+
+
+class Child(BaseModel):
+    id: Optional[Id] = None
+    parent_id: Optional[Id] = None
+    name: Optional[str] = None
+    pet: Optional[Pet] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/reference_to_object_properties_collapse_root_models.py 0.45.0-1/tests/data/expected/main/openapi/reference_to_object_properties_collapse_root_models.py
--- 0.26.4-3/tests/data/expected/main/openapi/reference_to_object_properties_collapse_root_models.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/reference_to_object_properties_collapse_root_models.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,27 @@
+# generated by datamodel-codegen:
+#   filename:  reference_to_object_properties.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
+
+
+class Parent(BaseModel):
+    id: Optional[str] = None
+    name: Optional[str] = None
+    pet: Optional[Pet] = None
+
+
+class Child(BaseModel):
+    id: Optional[str] = None
+    parent_id: Optional[str] = None
+    name: Optional[str] = None
+    pet: Optional[Pet] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/referenced_default.py 0.45.0-1/tests/data/expected/main/openapi/referenced_default.py
--- 0.26.4-3/tests/data/expected/main/openapi/referenced_default.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/referenced_default.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+# generated by datamodel-codegen:
+#   filename:  referenced_default.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field, RootModel, confloat
+
+
+class ModelSettingB(RootModel[confloat(ge=0.0, le=10.0)]):
+    root: confloat(ge=0.0, le=10.0)
+
+
+class Model(BaseModel):
+    settingA: Optional[confloat(ge=0.0, le=10.0)] = 5
+    settingB: Optional[ModelSettingB] = Field(
+        default_factory=lambda: ModelSettingB.model_validate(5)
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/referenced_default_use_annotated.py 0.45.0-1/tests/data/expected/main/openapi/referenced_default_use_annotated.py
--- 0.26.4-3/tests/data/expected/main/openapi/referenced_default_use_annotated.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/referenced_default_use_annotated.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  referenced_default.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, Optional
+
+from pydantic import BaseModel, Field, RootModel
+
+
+class ModelSettingB(RootModel[float]):
+    root: Annotated[float, Field(ge=0.0, le=10.0)]
+
+
+class Model(BaseModel):
+    settingA: Annotated[Optional[float], Field(ge=0.0, le=10.0)] = 5
+    settingB: Annotated[
+        Optional[ModelSettingB],
+        Field(default_factory=lambda: ModelSettingB.model_validate(ModelSettingB(5))),
+    ]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/same_name_objects.py 0.45.0-1/tests/data/expected/main/openapi/same_name_objects.py
--- 0.26.4-3/tests/data/expected/main/openapi/same_name_objects.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/same_name_objects.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,43 @@
+# generated by datamodel-codegen:
+#   filename:  same_name_objects.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Extra
+
+
+class Pets(BaseModel):
+    pass
+
+    class Config:
+        extra = Extra.forbid
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Resolved(BaseModel):
+    resolved: Optional[List[str]] = None
+
+
+class PetsModel(BaseModel):
+    __root__: List[Pet]
+
+
+class Friends2(BaseModel):
+    __root__: PetsModel
+
+
+class Friends1(BaseModel):
+    __root__: PetsModel
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/serialize_as_any_pydantic_v2.py 0.45.0-1/tests/data/expected/main/openapi/serialize_as_any_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/serialize_as_any_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/serialize_as_any_pydantic_v2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,29 @@
+# generated by datamodel-codegen:
+#   filename:  serialize_as_any.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List
+
+from pydantic import BaseModel, Field, SerializeAsAny
+
+
+class User(BaseModel):
+    name: str = Field(..., description="User's name")
+
+
+class AdminUser(User):
+    admin_level: int = Field(..., description='Admin permission level')
+
+
+class Container(BaseModel):
+    admin_user_field: AdminUser = Field(
+        ..., description='Field that should not use SerializeAsAny'
+    )
+    user_field: SerializeAsAny[User] = Field(
+        ..., description='Field that should use SerializeAsAny'
+    )
+    user_list: List[SerializeAsAny[User]] = Field(
+        ..., description='List of users that should use SerializeAsAny'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/shadowed_imports.py 0.45.0-1/tests/data/expected/main/openapi/shadowed_imports.py
--- 0.26.4-3/tests/data/expected/main/openapi/shadowed_imports.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/shadowed_imports.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  shadowed_imports.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from datetime import date as date_aliased
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class MarketingOptIn(BaseModel):
+    optedIn: Optional[bool] = Field(None, examples=[False])
+    date: Optional[date_aliased] = Field(None, examples=['2018-04-26T17:03:25.155Z'])
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/special_yaml_keywords.py 0.45.0-1/tests/data/expected/main/openapi/special_yaml_keywords.py
--- 0.26.4-3/tests/data/expected/main/openapi/special_yaml_keywords.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/special_yaml_keywords.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,30 @@
+# generated by datamodel-codegen:
+#   filename:  special_yaml_keywords.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class None1(BaseModel):
+    pass
+
+
+class False1(BaseModel):
+    pass
+
+
+class True1(BaseModel):
+    pass
+
+
+class On(BaseModel):
+    pass
+
+
+class NestedKeywords(BaseModel):
+    None_: None1 = Field(..., alias='None')
+    false: False1
+    True_: True1 = Field(..., alias='True')
+    on: On
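
A standalone sketch (pydantic v1 API, matching the fixture above) of how the keyword-colliding fields are populated: the YAML keys keep their original spelling and reach the sanitized field names through `alias`. The model below is a trimmed, hypothetical stand-in for NestedKeywords.

from pydantic import BaseModel, Field

class Nested(BaseModel):
    None_: int = Field(..., alias='None')
    True_: int = Field(..., alias='True')

data = Nested.parse_obj({'None': 1, 'True': 2})  # populated by alias
print(data.None_, data.True_)  # 1 2
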
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/stdin.py 0.45.0-1/tests/data/expected/main/openapi/stdin.py
--- 0.26.4-3/tests/data/expected/main/openapi/stdin.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/stdin.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  <stdin>
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/strict_types_field_constraints_msgspec.py 0.45.0-1/tests/data/expected/main/openapi/strict_types_field_constraints_msgspec.py
--- 0.26.4-3/tests/data/expected/main/openapi/strict_types_field_constraints_msgspec.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/strict_types_field_constraints_msgspec.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  strict_types_field_constraints.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated
+
+from msgspec import Meta
+from typing_extensions import TypeAlias
+
+Timestamp: TypeAlias = Annotated[int, Meta(ge=1, le=9999999999)]
+
+
+Score: TypeAlias = Annotated[float, Meta(ge=0.0, le=100.0)]
+
+
+Name: TypeAlias = Annotated[str, Meta(max_length=100, min_length=1)]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/strict_types_field_constraints_pydantic_v2.py 0.45.0-1/tests/data/expected/main/openapi/strict_types_field_constraints_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/strict_types_field_constraints_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/strict_types_field_constraints_pydantic_v2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  strict_types_field_constraints.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import Field, RootModel, StrictFloat, StrictInt, StrictStr
+
+
+class Timestamp(RootModel[StrictInt]):
+    root: StrictInt = Field(..., ge=1, le=9999999999)
+
+
+class Score(RootModel[StrictFloat]):
+    root: StrictFloat = Field(..., ge=0.0, le=100.0)
+
+
+class Name(RootModel[StrictStr]):
+    root: StrictStr = Field(..., max_length=100, min_length=1)
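
A standalone sketch (pydantic v2 assumed) of what the strict root models above enforce: StrictInt refuses the usual string/float coercion, and the Field bounds still apply to the root value.

from pydantic import Field, RootModel, StrictInt, ValidationError

class Timestamp(RootModel[StrictInt]):
    root: StrictInt = Field(..., ge=1, le=9999999999)

print(Timestamp.model_validate(1700000000).root)  # 1700000000

for bad in ("170", 0):  # str is not coerced; 0 violates ge=1
    try:
        Timestamp.model_validate(bad)
    except ValidationError:
        print(repr(bad), "rejected")
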
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/subclass_enum.py 0.45.0-1/tests/data/expected/main/openapi/subclass_enum.py
--- 0.26.4-3/tests/data/expected/main/openapi/subclass_enum.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/subclass_enum.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,22 @@
+# generated by datamodel-codegen:
+#   filename:  subclass_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class ProcessingStatus(Enum):
+    COMPLETED = 'COMPLETED'
+    PENDING = 'PENDING'
+    FAILED = 'FAILED'
+
+
+class ProcessingTask(BaseModel):
+    processing_status: Optional[ProcessingStatus] = Field(
+        'COMPLETED', title='Status of the task'
+    )
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/target_python_version.py 0.45.0-1/tests/data/expected/main/openapi/target_python_version.py
--- 0.26.4-3/tests/data/expected/main/openapi/target_python_version.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/target_python_version.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/target_python_version_313_has_future_annotations.py 0.45.0-1/tests/data/expected/main/openapi/target_python_version_313_has_future_annotations.py
--- 0.26.4-3/tests/data/expected/main/openapi/target_python_version_313_has_future_annotations.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/target_python_version_313_has_future_annotations.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/target_python_version_314_no_future_annotations.py 0.45.0-1/tests/data/expected/main/openapi/target_python_version_314_no_future_annotations.py
--- 0.26.4-3/tests/data/expected/main/openapi/target_python_version_314_no_future_annotations.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/target_python_version_314_no_future_annotations.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,67 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/type_alias.py 0.45.0-1/tests/data/expected/main/openapi/type_alias.py
--- 0.26.4-3/tests/data/expected/main/openapi/type_alias.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/type_alias.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,30 @@
+# generated by datamodel-codegen:
+#   filename:  type_alias.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, List, Optional, Union
+
+from pydantic import BaseModel, Field
+from typing_extensions import TypeAlias
+
+SimpleString: TypeAlias = str
+
+
+UnionType: TypeAlias = Union[str, int]
+
+
+ArrayType: TypeAlias = List[str]
+
+
+AnnotatedType: TypeAlias = Annotated[
+    Union[str, bool], Field(..., title='MyAnnotatedType')
+]
+
+
+class ModelWithTypeAliasField(BaseModel):
+    simple_field: Optional[SimpleString] = None
+    union_field: Optional[UnionType] = None
+    array_field: Optional[ArrayType] = None
+    annotated_field: Optional[AnnotatedType] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/type_alias_cross_module_collision_a.py 0.45.0-1/tests/data/expected/main/openapi/type_alias_cross_module_collision_a.py
--- 0.26.4-3/tests/data/expected/main/openapi/type_alias_cross_module_collision_a.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/type_alias_cross_module_collision_a.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,9 @@
+# generated by datamodel-codegen:
+#   filename:  a.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import TypeAlias
+
+Item: TypeAlias = str
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/type_alias_cross_module_collision_b.py 0.45.0-1/tests/data/expected/main/openapi/type_alias_cross_module_collision_b.py
--- 0.26.4-3/tests/data/expected/main/openapi/type_alias_cross_module_collision_b.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/type_alias_cross_module_collision_b.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,9 @@
+# generated by datamodel-codegen:
+#   filename:  b.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, TypeAlias
+
+Item: TypeAlias = List["Item"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/type_alias_forward_ref_multiple.py 0.45.0-1/tests/data/expected/main/openapi/type_alias_forward_ref_multiple.py
--- 0.26.4-3/tests/data/expected/main/openapi/type_alias_forward_ref_multiple.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/type_alias_forward_ref_multiple.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,22 @@
+# generated by datamodel-codegen:
+#   filename:  type_alias_forward_ref_multiple.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, TypeAlias, Union
+
+from pydantic import BaseModel
+
+
+class RegularModel(BaseModel):
+    name: Optional[str] = None
+
+
+Third: TypeAlias = str
+
+
+Second: TypeAlias = Union["First", Third, RegularModel]
+
+
+First: TypeAlias = Second
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/type_alias_mutual_recursive.py 0.45.0-1/tests/data/expected/main/openapi/type_alias_mutual_recursive.py
--- 0.26.4-3/tests/data/expected/main/openapi/type_alias_mutual_recursive.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/type_alias_mutual_recursive.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  type_alias_mutual_recursive.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import TypeAlias, Union
+
+NodeA: TypeAlias = Union[int, "NodeB"]
+
+
+NodeB: TypeAlias = Union[str, NodeA]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/type_alias_py312.py 0.45.0-1/tests/data/expected/main/openapi/type_alias_py312.py
--- 0.26.4-3/tests/data/expected/main/openapi/type_alias_py312.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/type_alias_py312.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,27 @@
+# generated by datamodel-codegen:
+#   filename:  type_alias.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, List, Optional, Union
+
+from pydantic import BaseModel, Field
+
+type SimpleString = str
+
+
+type UnionType = Union[str, int]
+
+
+type ArrayType = List[str]
+
+
+type AnnotatedType = Annotated[Union[str, bool], Field(..., title='MyAnnotatedType')]
+
+
+class ModelWithTypeAliasField(BaseModel):
+    simple_field: Optional[SimpleString] = None
+    union_field: Optional[UnionType] = None
+    array_field: Optional[ArrayType] = None
+    annotated_field: Optional[AnnotatedType] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/type_alias_recursive.py 0.45.0-1/tests/data/expected/main/openapi/type_alias_recursive.py
--- 0.26.4-3/tests/data/expected/main/openapi/type_alias_recursive.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/type_alias_recursive.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,42 @@
+# generated by datamodel-codegen:
+#   filename:  type_alias_recursive.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Dict, List, Optional, Union
+
+from pydantic import BaseModel
+from typing_extensions import TypeAlias
+
+
+class File(BaseModel):
+    path: str
+
+
+class Folder(BaseModel):
+    address: Optional[str] = None
+    files: List[File]
+    subfolders: Optional[List[Folder]] = None
+
+
+ElementaryType: TypeAlias = Optional[Union[bool, str, int, float]]
+
+
+JsonType: TypeAlias = Union[ElementaryType, List["JsonType"], Dict[str, "JsonType"]]
+
+
+class Space(BaseModel):
+    label: Optional[str] = None
+    data: Optional[JsonType] = None
+    dual: Optional[DualSpace] = None
+
+
+class DualSpace(BaseModel):
+    label: Optional[str] = None
+    data: Optional[JsonType] = None
+    predual: Optional[Space] = None
+
+
+Folder.update_forward_refs()
+Space.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/type_alias_recursive_py311.py 0.45.0-1/tests/data/expected/main/openapi/type_alias_recursive_py311.py
--- 0.26.4-3/tests/data/expected/main/openapi/type_alias_recursive_py311.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/type_alias_recursive_py311.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,41 @@
+# generated by datamodel-codegen:
+#   filename:  type_alias_recursive.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Dict, List, Optional, TypeAlias, Union
+
+from pydantic import BaseModel
+
+
+class File(BaseModel):
+    path: str
+
+
+class Folder(BaseModel):
+    address: Optional[str] = None
+    files: List[File]
+    subfolders: Optional[List[Folder]] = None
+
+
+ElementaryType: TypeAlias = Optional[Union[bool, str, int, float]]
+
+
+JsonType: TypeAlias = Union[ElementaryType, List["JsonType"], Dict[str, "JsonType"]]
+
+
+class Space(BaseModel):
+    label: Optional[str] = None
+    data: Optional[JsonType] = None
+    dual: Optional[DualSpace] = None
+
+
+class DualSpace(BaseModel):
+    label: Optional[str] = None
+    data: Optional[JsonType] = None
+    predual: Optional[Space] = None
+
+
+Folder.update_forward_refs()
+Space.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/type_alias_recursive_py312.py 0.45.0-1/tests/data/expected/main/openapi/type_alias_recursive_py312.py
--- 0.26.4-3/tests/data/expected/main/openapi/type_alias_recursive_py312.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/type_alias_recursive_py312.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,39 @@
+# generated by datamodel-codegen:
+#   filename:  type_alias_recursive.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class File(BaseModel):
+    path: str
+
+
+class Folder(BaseModel):
+    address: str | None = None
+    files: list[File]
+    subfolders: list[Folder] | None = None
+
+
+type ElementaryType = bool | str | int | float | None
+
+
+type JsonType = ElementaryType | list[JsonType] | dict[str, JsonType]
+
+
+class Space(BaseModel):
+    label: str | None = None
+    data: JsonType | None = None
+    dual: DualSpace | None = None
+
+
+class DualSpace(BaseModel):
+    label: str | None = None
+    data: JsonType | None = None
+    predual: Space | None = None
+
+
+Folder.model_rebuild()
+Space.model_rebuild()
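
Two things worth noting against the 3.11 variant above: the trailing calls switch from v1's `update_forward_refs()` to pydantic v2's `model_rebuild()`, and the PEP 695 `type` statement is evaluated lazily, so the recursive alias needs no quoted forward references. A standalone sketch of that laziness (Python 3.12+, names illustrative):

# Sketch: the alias body may mention itself directly under PEP 695.
type Json = None | bool | int | float | str | list[Json] | dict[str, Json]

def depth(value: Json) -> int:
    """Nesting depth of a JSON-like value."""
    if isinstance(value, list):
        return 1 + max(map(depth, value), default=0)
    if isinstance(value, dict):
        return 1 + max(map(depth, value.values()), default=0)
    return 0

print(depth({"a": [1, {"b": 2}]}))  # 3
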
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/typed_dict.py 0.45.0-1/tests/data/expected/main/openapi/typed_dict.py
--- 0.26.4-3/tests/data/expected/main/openapi/typed_dict.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/typed_dict.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,56 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, TypedDict
+
+from typing_extensions import NotRequired, TypeAlias
+
+
+class Pet(TypedDict):
+    id: int
+    name: str
+    tag: NotRequired[str]
+
+
+Pets: TypeAlias = List[Pet]
+
+
+class User(TypedDict):
+    id: int
+    name: str
+    tag: NotRequired[str]
+
+
+Users: TypeAlias = List[User]
+
+
+Id: TypeAlias = str
+
+
+Rules: TypeAlias = List[str]
+
+
+class Error(TypedDict):
+    code: int
+    message: str
+
+
+class Api(TypedDict):
+    apiKey: NotRequired[str]
+    apiVersionNumber: NotRequired[str]
+    apiUrl: NotRequired[str]
+    apiDocumentationUrl: NotRequired[str]
+
+
+Apis: TypeAlias = List[Api]
+
+
+class Event(TypedDict):
+    name: NotRequired[str]
+
+
+class Result(TypedDict):
+    event: NotRequired[Event]
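
A standalone sketch of the runtime contract behind the TypedDict output above: instances are ordinary dicts, and `NotRequired` keys may simply be absent; the checking is static, not at runtime.

from typing import TypedDict
from typing_extensions import NotRequired

class Pet(TypedDict):
    id: int
    name: str
    tag: NotRequired[str]

pet: Pet = {"id": 1, "name": "Rex"}  # fine without 'tag'
print(pet.get("tag", "<none>"))      # <none>
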
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/typed_dict_nullable.py 0.45.0-1/tests/data/expected/main/openapi/typed_dict_nullable.py
--- 0.26.4-3/tests/data/expected/main/openapi/typed_dict_nullable.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/typed_dict_nullable.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,67 @@
+# generated by datamodel-codegen:
+#   filename:  nullable.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, NotRequired, Optional, TypeAlias, TypedDict, Union
+
+
+class Cursors(TypedDict):
+    prev: str
+    next: NotRequired[str]
+    index: float
+    tag: NotRequired[str]
+
+
+class TopLevel(TypedDict):
+    cursors: Cursors
+
+
+class Info(TypedDict):
+    name: str
+
+
+class User(TypedDict):
+    info: Info
+
+
+class Api(TypedDict):
+    apiKey: NotRequired[str]
+    apiVersionNumber: NotRequired[str]
+    apiUrl: NotRequired[str]
+    apiDocumentationUrl: NotRequired[str]
+
+
+Apis: TypeAlias = Optional[List[Api]]
+
+
+class EmailItem(TypedDict):
+    author: str
+    address: str
+    description: NotRequired[str]
+    tag: NotRequired[str]
+
+
+Email: TypeAlias = List[EmailItem]
+
+
+Id: TypeAlias = int
+
+
+Description: TypeAlias = Optional[str]
+
+
+Name: TypeAlias = Optional[str]
+
+
+Tag: TypeAlias = str
+
+
+class Notes(TypedDict):
+    comments: NotRequired[List[str]]
+
+
+class Options(TypedDict):
+    comments: List[str]
+    oneOfComments: List[Union[str, float]]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/typed_dict_nullable_strict_nullable.py 0.45.0-1/tests/data/expected/main/openapi/typed_dict_nullable_strict_nullable.py
--- 0.26.4-3/tests/data/expected/main/openapi/typed_dict_nullable_strict_nullable.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/typed_dict_nullable_strict_nullable.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,67 @@
+# generated by datamodel-codegen:
+#   filename:  nullable.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, NotRequired, Optional, TypeAlias, TypedDict, Union
+
+
+class Cursors(TypedDict):
+    prev: Optional[str]
+    next: NotRequired[str]
+    index: float
+    tag: NotRequired[str]
+
+
+class TopLevel(TypedDict):
+    cursors: Cursors
+
+
+class Info(TypedDict):
+    name: str
+
+
+class User(TypedDict):
+    info: Info
+
+
+class Api(TypedDict):
+    apiKey: NotRequired[str]
+    apiVersionNumber: NotRequired[str]
+    apiUrl: NotRequired[Optional[str]]
+    apiDocumentationUrl: NotRequired[Optional[str]]
+
+
+Apis: TypeAlias = Optional[List[Api]]
+
+
+class EmailItem(TypedDict):
+    author: str
+    address: str
+    description: NotRequired[str]
+    tag: NotRequired[str]
+
+
+Email: TypeAlias = List[EmailItem]
+
+
+Id: TypeAlias = int
+
+
+Description: TypeAlias = Optional[str]
+
+
+Name: TypeAlias = Optional[str]
+
+
+Tag: TypeAlias = str
+
+
+class Notes(TypedDict):
+    comments: NotRequired[List[str]]
+
+
+class Options(TypedDict):
+    comments: List[Optional[str]]
+    oneOfComments: List[Union[Optional[str], Optional[float]]]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/typed_dict_py.py 0.45.0-1/tests/data/expected/main/openapi/typed_dict_py.py
--- 0.26.4-3/tests/data/expected/main/openapi/typed_dict_py.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/typed_dict_py.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,56 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, TypedDict
+
+from typing_extensions import NotRequired, TypeAlias
+
+
+class Pet(TypedDict):
+    id: int
+    name: str
+    tag: NotRequired[str]
+
+
+Pets: TypeAlias = List[Pet]
+
+
+class User(TypedDict):
+    id: int
+    name: str
+    tag: NotRequired[str]
+
+
+Users: TypeAlias = List[User]
+
+
+Id: TypeAlias = str
+
+
+Rules: TypeAlias = List[str]
+
+
+class Error(TypedDict):
+    code: int
+    message: str
+
+
+class Api(TypedDict):
+    apiKey: NotRequired[str]
+    apiVersionNumber: NotRequired[str]
+    apiUrl: NotRequired[str]
+    apiDocumentationUrl: NotRequired[str]
+
+
+Apis: TypeAlias = List[Api]
+
+
+class Event(TypedDict):
+    name: NotRequired[str]
+
+
+class Result(TypedDict):
+    event: NotRequired[Event]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/union_default_object.py 0.45.0-1/tests/data/expected/main/openapi/union_default_object.py
--- 0.26.4-3/tests/data/expected/main/openapi/union_default_object.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/union_default_object.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,22 @@
+# generated by datamodel-codegen:
+#   filename:  union_default_object.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Dict, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Interval(BaseModel):
+    start: Optional[int] = None
+    end: Optional[int] = None
+
+
+class Container(BaseModel):
+    interval_or_string: Optional[Union[Interval, str]] = Field(
+        default_factory=lambda: Interval.parse_obj({'start': 2009, 'end': 2019})
+    )
+    string_or_interval: Optional[Union[Interval, str]] = 'some string value'
+    dict_or_interval: Optional[Union[Dict[str, str], Interval]] = {'key': 'value'}
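
A standalone sketch (pydantic v1 API, matching the `parse_obj` call above) of why the object-valued default is wrapped in `default_factory`: the factory builds a fresh default per instance instead of one shared mutable model.

from typing import Optional
from pydantic import BaseModel, Field

class Interval(BaseModel):
    start: Optional[int] = None
    end: Optional[int] = None

class Container(BaseModel):
    interval: Optional[Interval] = Field(
        default_factory=lambda: Interval.parse_obj({'start': 2009, 'end': 2019})
    )

a, b = Container(), Container()
a.interval.start = 1999
print(b.interval.start)  # 2009 -- b holds its own Interval
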
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/unique_items_default_set_dataclass.py 0.45.0-1/tests/data/expected/main/openapi/unique_items_default_set_dataclass.py
--- 0.26.4-3/tests/data/expected/main/openapi/unique_items_default_set_dataclass.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/unique_items_default_set_dataclass.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  unique_items_default_set.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from typing import Optional, Set
+
+
+@dataclass
+class TestModel:
+    tags: Optional[Set[str]] = field(default_factory=lambda: {'tag1', 'tag2'})
+    empty_tags: Optional[Set[str]] = field(default_factory=set)
+    numbers: Optional[Set[int]] = field(default_factory=lambda: {1, 2, 3})
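
A standalone sketch of the constraint driving the dataclass variant above: `dataclasses` rejects mutable class-level defaults such as sets, so they must come from `field(default_factory=...)`, giving each instance its own copy.

from dataclasses import dataclass, field
from typing import Optional, Set

@dataclass
class TestModel:
    tags: Optional[Set[str]] = field(default_factory=lambda: {'tag1', 'tag2'})

a, b = TestModel(), TestModel()
a.tags.add('tag3')
print(sorted(b.tags))  # ['tag1', 'tag2'] -- unaffected by a
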
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/unique_items_default_set_msgspec.py 0.45.0-1/tests/data/expected/main/openapi/unique_items_default_set_msgspec.py
--- 0.26.4-3/tests/data/expected/main/openapi/unique_items_default_set_msgspec.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/unique_items_default_set_msgspec.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  unique_items_default_set.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Set, Union
+
+from msgspec import Struct, UnsetType, field
+
+
+class TestModel(Struct):
+    tags: Union[Set[str], UnsetType] = field(default_factory=lambda: {'tag1', 'tag2'})
+    empty_tags: Union[Set[str], UnsetType] = field(default_factory=set)
+    numbers: Union[Set[int], UnsetType] = field(default_factory=lambda: {1, 2, 3})
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/unique_items_default_set_pydantic.py 0.45.0-1/tests/data/expected/main/openapi/unique_items_default_set_pydantic.py
--- 0.26.4-3/tests/data/expected/main/openapi/unique_items_default_set_pydantic.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/unique_items_default_set_pydantic.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  unique_items_default_set.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Set
+
+from pydantic import BaseModel, Field
+
+
+class TestModel(BaseModel):
+    tags: Optional[Set[str]] = Field({'tag1', 'tag2'}, unique_items=True)
+    empty_tags: Optional[Set[str]] = Field(set(), unique_items=True)
+    numbers: Optional[Set[int]] = Field({1, 2, 3}, unique_items=True)
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/unique_items_default_set_pydantic_v2.py 0.45.0-1/tests/data/expected/main/openapi/unique_items_default_set_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/unique_items_default_set_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/unique_items_default_set_pydantic_v2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  unique_items_default_set.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Set
+
+from pydantic import BaseModel
+
+
+class TestModel(BaseModel):
+    tags: Optional[Set[str]] = {'tag1', 'tag2'}
+    empty_tags: Optional[Set[str]] = set()
+    numbers: Optional[Set[int]] = {1, 2, 3}
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/unquoted_null.py 0.45.0-1/tests/data/expected/main/openapi/unquoted_null.py
--- 0.26.4-3/tests/data/expected/main/openapi/unquoted_null.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/unquoted_null.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  unquoted_null.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, RootModel
+
+
+class Thing(BaseModel):
+    value: Optional[str] = None
+
+
+class NullThing(RootModel[None]):
+    root: None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/unsorted_optional_fields.py 0.45.0-1/tests/data/expected/main/openapi/unsorted_optional_fields.py
--- 0.26.4-3/tests/data/expected/main/openapi/unsorted_optional_fields.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/unsorted_optional_fields.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  unsorted_optional_fields.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Optional
+
+
+@dataclass
+class Note:
+    text: str
+    author: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_annotated_with_field_constraints.py 0.45.0-1/tests/data/expected/main/openapi/use_annotated_with_field_constraints.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_annotated_with_field_constraints.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_annotated_with_field_constraints.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,88 @@
+# generated by datamodel-codegen:
+#   filename:  api_constrained.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, List, Optional, Union
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: Annotated[int, Field(ge=0, le=9223372036854775807)]
+    name: Annotated[str, Field(max_length=256)]
+    tag: Annotated[Optional[str], Field(max_length=64)] = None
+
+
+class Pets(BaseModel):
+    __root__: Annotated[List[Pet], Field(max_items=10, min_items=1, unique_items=True)]
+
+
+class UID(BaseModel):
+    __root__: Annotated[int, Field(ge=0)]
+
+
+class Phone(BaseModel):
+    __root__: Annotated[str, Field(min_length=3)]
+
+
+class FaxItem(BaseModel):
+    __root__: Annotated[str, Field(min_length=3)]
+
+
+class User(BaseModel):
+    id: Annotated[int, Field(ge=0)]
+    name: Annotated[str, Field(max_length=256)]
+    tag: Annotated[Optional[str], Field(max_length=64)] = None
+    uid: UID
+    phones: Annotated[Optional[List[Phone]], Field(max_items=10)] = None
+    fax: Optional[List[FaxItem]] = None
+    height: Annotated[Optional[Union[int, float]], Field(ge=1.0, le=300.0)] = None
+    weight: Annotated[Optional[Union[float, int]], Field(ge=1.0, le=1000.0)] = None
+    age: Annotated[Optional[int], Field(gt=0, le=200)] = None
+    rating: Annotated[Optional[float], Field(gt=0.0, le=5.0)] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Annotated[
+        Optional[str], Field(description='To be used as a dataset parameter value')
+    ] = None
+    apiVersionNumber: Annotated[
+        Optional[str], Field(description='To be used as a version parameter value')
+    ] = None
+    apiUrl: Annotated[
+        Optional[AnyUrl], Field(description="The URL describing the dataset's fields")
+    ] = None
+    apiDocumentationUrl: Annotated[
+        Optional[AnyUrl], Field(description='A URL to the API console for each API')
+    ] = None
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_annotated_with_field_constraints_py38.py 0.45.0-1/tests/data/expected/main/openapi/use_annotated_with_field_constraints_py38.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_annotated_with_field_constraints_py38.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_annotated_with_field_constraints_py38.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,88 @@
+# generated by datamodel-codegen:
+#   filename:  api_constrained.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, List, Optional, Union
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: Annotated[int, Field(ge=0, le=9223372036854775807)]
+    name: Annotated[str, Field(max_length=256)]
+    tag: Annotated[Optional[str], Field(max_length=64)] = None
+
+
+class Pets(BaseModel):
+    __root__: Annotated[List[Pet], Field(max_items=10, min_items=1, unique_items=True)]
+
+
+class UID(BaseModel):
+    __root__: Annotated[int, Field(ge=0)]
+
+
+class Phone(BaseModel):
+    __root__: Annotated[str, Field(min_length=3)]
+
+
+class FaxItem(BaseModel):
+    __root__: Annotated[str, Field(min_length=3)]
+
+
+class User(BaseModel):
+    id: Annotated[int, Field(ge=0)]
+    name: Annotated[str, Field(max_length=256)]
+    tag: Annotated[Optional[str], Field(max_length=64)] = None
+    uid: UID
+    phones: Annotated[Optional[List[Phone]], Field(max_items=10)] = None
+    fax: Optional[List[FaxItem]] = None
+    height: Annotated[Optional[Union[int, float]], Field(ge=1.0, le=300.0)] = None
+    weight: Annotated[Optional[Union[float, int]], Field(ge=1.0, le=1000.0)] = None
+    age: Annotated[Optional[int], Field(gt=0, le=200)] = None
+    rating: Annotated[Optional[float], Field(gt=0.0, le=5.0)] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Annotated[
+        Optional[str], Field(description='To be used as a dataset parameter value')
+    ] = None
+    apiVersionNumber: Annotated[
+        Optional[str], Field(description='To be used as a version parameter value')
+    ] = None
+    apiUrl: Annotated[
+        Optional[AnyUrl], Field(description="The URL describing the dataset's fields")
+    ] = None
+    apiDocumentationUrl: Annotated[
+        Optional[AnyUrl], Field(description='A URL to the API console for each API')
+    ] = None
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_annotated_with_field_constraints_pydantic_v2.py 0.45.0-1/tests/data/expected/main/openapi/use_annotated_with_field_constraints_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_annotated_with_field_constraints_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_annotated_with_field_constraints_pydantic_v2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,88 @@
+# generated by datamodel-codegen:
+#   filename:  api_constrained.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, List, Optional, Union
+
+from pydantic import AnyUrl, BaseModel, Field, RootModel
+
+
+class Pet(BaseModel):
+    id: Annotated[int, Field(ge=0, le=9223372036854775807)]
+    name: Annotated[str, Field(max_length=256)]
+    tag: Annotated[Optional[str], Field(max_length=64)] = None
+
+
+class Pets(RootModel[List[Pet]]):
+    root: Annotated[List[Pet], Field(max_length=10, min_length=1)]
+
+
+class UID(RootModel[int]):
+    root: Annotated[int, Field(ge=0)]
+
+
+class Phone(RootModel[str]):
+    root: Annotated[str, Field(min_length=3)]
+
+
+class FaxItem(RootModel[str]):
+    root: Annotated[str, Field(min_length=3)]
+
+
+class User(BaseModel):
+    id: Annotated[int, Field(ge=0)]
+    name: Annotated[str, Field(max_length=256)]
+    tag: Annotated[Optional[str], Field(max_length=64)] = None
+    uid: UID
+    phones: Annotated[Optional[List[Phone]], Field(max_length=10)] = None
+    fax: Optional[List[FaxItem]] = None
+    height: Annotated[Optional[Union[int, float]], Field(ge=1.0, le=300.0)] = None
+    weight: Annotated[Optional[Union[float, int]], Field(ge=1.0, le=1000.0)] = None
+    age: Annotated[Optional[int], Field(gt=0, le=200)] = None
+    rating: Annotated[Optional[float], Field(gt=0.0, le=5.0)] = None
+
+
+class Users(RootModel[List[User]]):
+    root: List[User]
+
+
+class Id(RootModel[str]):
+    root: str
+
+
+class Rules(RootModel[List[str]]):
+    root: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Annotated[
+        Optional[str], Field(description='To be used as a dataset parameter value')
+    ] = None
+    apiVersionNumber: Annotated[
+        Optional[str], Field(description='To be used as a version parameter value')
+    ] = None
+    apiUrl: Annotated[
+        Optional[AnyUrl], Field(description="The URL describing the dataset's fields")
+    ] = None
+    apiDocumentationUrl: Annotated[
+        Optional[AnyUrl], Field(description='A URL to the API console for each API')
+    ] = None
+
+
+class Apis(RootModel[List[Api]]):
+    root: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
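
Note how the v2 variant above differs from the v1 files: the list constraints are emitted as `max_length`/`min_length` (pydantic v2 renamed v1's `max_items`/`min_items` and dropped `unique_items`). A standalone sketch showing that constraints inside `Annotated[...]` validate exactly like inline ones (pydantic v2 assumed):

from typing import Annotated
from pydantic import BaseModel, Field, ValidationError

class Pet(BaseModel):
    id: Annotated[int, Field(ge=0)]
    name: Annotated[str, Field(max_length=5)]

print(Pet(id=1, name="Rex"))

try:
    Pet(id=-1, name="far too long")
except ValidationError as exc:
    print(exc.error_count(), "constraint violations")  # 2
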
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_default.py 0.45.0-1/tests/data/expected/main/openapi/use_default.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_default.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_default.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: Optional[int] = 1
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_default_kwarg.py 0.45.0-1/tests/data/expected/main/openapi/use_default_kwarg.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_default_kwarg.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_default_kwarg.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,83 @@
+# generated by datamodel-codegen:
+#   filename:  nullable.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional, Union
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Cursors(BaseModel):
+    prev: str
+    next: Optional[str] = 'last'
+    index: float
+    tag: Optional[str] = None
+
+
+class TopLevel(BaseModel):
+    cursors: Cursors
+
+
+class Info(BaseModel):
+    name: str
+
+
+class User(BaseModel):
+    info: Info
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        default=None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        default=None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        default=None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        default=None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: Optional[List[Api]] = None
+
+
+class EmailItem(BaseModel):
+    author: str
+    address: str = Field(..., description='email address')
+    description: Optional[str] = 'empty'
+    tag: Optional[str] = None
+
+
+class Email(BaseModel):
+    __root__: List[EmailItem]
+
+
+class Id(BaseModel):
+    __root__: int
+
+
+class Description(BaseModel):
+    __root__: Optional[str] = 'example'
+
+
+class Name(BaseModel):
+    __root__: Optional[str] = None
+
+
+class Tag(BaseModel):
+    __root__: str
+
+
+class Notes(BaseModel):
+    comments: List[str] = Field(default_factory=list)
+
+
+class Options(BaseModel):
+    comments: List[str]
+    oneOfComments: List[Union[str, float]]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/__init__.py 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from ._internal import DifferentTea, Error, Id, OptionalModel, Result, Source
+
+__all__ = ["DifferentTea", "Error", "Id", "OptionalModel", "Result", "Source"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/_internal.py 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types/_internal.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/_internal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types/_internal.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,67 @@
+# generated by datamodel-codegen:
+#   filename:  _internal
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Sequence
+
+from pydantic import BaseModel
+
+from . import models
+
+
+class OptionalModel(BaseModel):
+    __root__: str
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Result(BaseModel):
+    event: Optional[models.Event] = None
+
+
+class Source(BaseModel):
+    country: Optional[str] = None
+
+
+class DifferentTea(BaseModel):
+    foo: Optional[Tea] = None
+    nested: Optional[Tea_1] = None
+
+
+class Tea(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+
+
+class Cocoa(BaseModel):
+    quality: Optional[int] = None
+
+
+class Tea_1(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea_1] = None
+    optional: Optional[Sequence[OptionalModel]] = None
+
+
+class TeaClone(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea_1] = None
+    optional: Optional[Sequence[OptionalModel]] = None
+
+
+class List(BaseModel):
+    __root__: Sequence[Tea_1]
+
+
+Tea_1.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/bar.py 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class FieldModel(BaseModel):
+    __root__: str = Field(..., example='green')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/collections.py 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types/collections.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types/collections.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,51 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional, Sequence
+
+from pydantic import AnyUrl, BaseModel, Field
+
+from . import models
+
+
+class Pets(BaseModel):
+    __root__: Sequence[models.Pet]
+
+
+class Users(BaseModel):
+    __root__: Sequence[models.User]
+
+
+class Rules(BaseModel):
+    __root__: Sequence[str]
+
+
+class Stage(Enum):
+    test = 'test'
+    dev = 'dev'
+    stg = 'stg'
+    prod = 'prod'
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+    stage: Optional[Stage] = None
+
+
+class Apis(BaseModel):
+    __root__: Sequence[Api]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/foo/__init__.py 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types/foo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/foo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types/foo/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from .._internal import Cocoa, Tea
+
+__all__ = ["Cocoa", "Tea"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/foo/bar.py 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types/foo/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/foo/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types/foo/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Mapping, Optional, Sequence
+
+from pydantic import BaseModel
+
+
+class Thing(BaseModel):
+    attributes: Optional[Mapping[str, Any]] = None
+
+
+class Thang(BaseModel):
+    attributes: Optional[Sequence[Mapping[str, Any]]] = None
+
+
+class Others(BaseModel):
+    name: Optional[str] = None
+
+
+class Clone(Thing):
+    others: Optional[Others] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/models.py 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types/models.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/models.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types/models.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,35 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, Mapping, Optional, Sequence, Union
+
+from pydantic import BaseModel
+
+
+class Species(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    species: Optional[Species] = None
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Event(BaseModel):
+    name: Optional[
+        Union[str, float, int, bool, Mapping[str, Any], Sequence[str]]
+    ] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/nested/__init__.py 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types/nested/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/nested/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types/nested/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/nested/foo.py 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types/nested/foo.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/nested/foo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types/nested/foo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,9 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from .._internal import List
+from .._internal import Tea_1 as Tea
+from .._internal import TeaClone
+
+__all__ = ["List", "Tea", "TeaClone"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/woo/__init__.py 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types/woo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/woo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types/woo/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/woo/boo.py 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types/woo/boo.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types/woo/boo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types/woo/boo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import bar
+from .._internal import Cocoa, Source
+
+
+class Chocolate(BaseModel):
+    flavour: Optional[str] = None
+    source: Optional[Source] = None
+    cocoa: Optional[Cocoa] = None
+    field: Optional[bar.FieldModel] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/__init__.py 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from ._internal import DifferentTea, Error, Id, OptionalModel, Result, Source
+
+__all__ = ["DifferentTea", "Error", "Id", "OptionalModel", "Result", "Source"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/_internal.py 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/_internal.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/_internal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/_internal.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,68 @@
+# generated by datamodel-codegen:
+#   filename:  _internal
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from collections.abc import Sequence
+from typing import Optional
+
+from pydantic import BaseModel
+
+from . import models
+
+
+class OptionalModel(BaseModel):
+    __root__: str
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Result(BaseModel):
+    event: Optional[models.Event] = None
+
+
+class Source(BaseModel):
+    country: Optional[str] = None
+
+
+class DifferentTea(BaseModel):
+    foo: Optional[Tea] = None
+    nested: Optional[Tea_1] = None
+
+
+class Tea(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+
+
+class Cocoa(BaseModel):
+    quality: Optional[int] = None
+
+
+class Tea_1(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea_1] = None
+    optional: Optional[Sequence[OptionalModel]] = None
+
+
+class TeaClone(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea_1] = None
+    optional: Optional[Sequence[OptionalModel]] = None
+
+
+class List(BaseModel):
+    __root__: Sequence[Tea_1]
+
+
+Tea_1.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/bar.py 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class FieldModel(BaseModel):
+    __root__: str = Field(..., example='green')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/collections.py 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/collections.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/collections.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,52 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from collections.abc import Sequence
+from enum import Enum
+from typing import Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+from . import models
+
+
+class Pets(BaseModel):
+    __root__: Sequence[models.Pet]
+
+
+class Users(BaseModel):
+    __root__: Sequence[models.User]
+
+
+class Rules(BaseModel):
+    __root__: Sequence[str]
+
+
+class Stage(Enum):
+    test = 'test'
+    dev = 'dev'
+    stg = 'stg'
+    prod = 'prod'
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+    stage: Optional[Stage] = None
+
+
+class Apis(BaseModel):
+    __root__: Sequence[Api]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/foo/__init__.py 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/foo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/foo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/foo/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from .._internal import Cocoa, Tea
+
+__all__ = ["Cocoa", "Tea"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/foo/bar.py 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/foo/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/foo/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/foo/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,26 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from collections.abc import Mapping, Sequence
+from typing import Any, Optional
+
+from pydantic import BaseModel
+
+
+class Thing(BaseModel):
+    attributes: Optional[Mapping[str, Any]] = None
+
+
+class Thang(BaseModel):
+    attributes: Optional[Sequence[Mapping[str, Any]]] = None
+
+
+class Others(BaseModel):
+    name: Optional[str] = None
+
+
+class Clone(Thing):
+    others: Optional[Others] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/models.py 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/models.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/models.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/models.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,36 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from collections.abc import Mapping, Sequence
+from enum import Enum
+from typing import Any, Optional, Union
+
+from pydantic import BaseModel
+
+
+class Species(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    species: Optional[Species] = None
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Event(BaseModel):
+    name: Optional[
+        Union[str, float, int, bool, Mapping[str, Any], Sequence[str]]
+    ] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/nested/__init__.py 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/nested/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/nested/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/nested/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/nested/foo.py 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/nested/foo.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/nested/foo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/nested/foo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,9 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from .._internal import List
+from .._internal import Tea_1 as Tea
+from .._internal import TeaClone
+
+__all__ = ["List", "Tea", "TeaClone"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/woo/__init__.py 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/woo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/woo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/woo/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/woo/boo.py 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/woo/boo.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/woo/boo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_generic_container_types_standard_collections/woo/boo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import bar
+from .._internal import Cocoa, Source
+
+
+class Chocolate(BaseModel):
+    flavour: Optional[str] = None
+    source: Optional[Source] = None
+    cocoa: Optional[Cocoa] = None
+    field: Optional[bar.FieldModel] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_operation_id_as_name.py 0.45.0-1/tests/data/expected/main/openapi/use_operation_id_as_name.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_operation_id_as_name.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_operation_id_as_name.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,73 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
+
+
+class ListPetsParametersQuery(BaseModel):
+    limit: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/__init__.py 0.45.0-1/tests/data/expected/main/openapi/use_standard_collections/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_standard_collections/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from ._internal import DifferentTea, Error, Id, OptionalModel, Result, Source
+
+__all__ = ["DifferentTea", "Error", "Id", "OptionalModel", "Result", "Source"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/_internal.py 0.45.0-1/tests/data/expected/main/openapi/use_standard_collections/_internal.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/_internal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_standard_collections/_internal.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,67 @@
+# generated by datamodel-codegen:
+#   filename:  _internal
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from . import models
+
+
+class OptionalModel(BaseModel):
+    __root__: str
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Result(BaseModel):
+    event: Optional[models.Event] = None
+
+
+class Source(BaseModel):
+    country: Optional[str] = None
+
+
+class DifferentTea(BaseModel):
+    foo: Optional[Tea] = None
+    nested: Optional[Tea_1] = None
+
+
+class Tea(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+
+
+class Cocoa(BaseModel):
+    quality: Optional[int] = None
+
+
+class Tea_1(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea_1] = None
+    optional: Optional[list[OptionalModel]] = None
+
+
+class TeaClone(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea_1] = None
+    optional: Optional[list[OptionalModel]] = None
+
+
+class List(BaseModel):
+    __root__: list[Tea_1]
+
+
+Tea_1.update_forward_refs()
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/bar.py 0.45.0-1/tests/data/expected/main/openapi/use_standard_collections/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_standard_collections/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class FieldModel(BaseModel):
+    __root__: str = Field(..., example='green')
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/collections.py 0.45.0-1/tests/data/expected/main/openapi/use_standard_collections/collections.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_standard_collections/collections.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,51 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+from . import models
+
+
+class Pets(BaseModel):
+    __root__: list[models.Pet]
+
+
+class Users(BaseModel):
+    __root__: list[models.User]
+
+
+class Rules(BaseModel):
+    __root__: list[str]
+
+
+class Stage(Enum):
+    test = 'test'
+    dev = 'dev'
+    stg = 'stg'
+    prod = 'prod'
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+    stage: Optional[Stage] = None
+
+
+class Apis(BaseModel):
+    __root__: list[Api]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/foo/__init__.py 0.45.0-1/tests/data/expected/main/openapi/use_standard_collections/foo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/foo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_standard_collections/foo/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from .._internal import Cocoa, Tea
+
+__all__ = ["Cocoa", "Tea"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/foo/bar.py 0.45.0-1/tests/data/expected/main/openapi/use_standard_collections/foo/bar.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/foo/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_standard_collections/foo/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Optional
+
+from pydantic import BaseModel
+
+
+class Thing(BaseModel):
+    attributes: Optional[dict[str, Any]] = None
+
+
+class Thang(BaseModel):
+    attributes: Optional[list[dict[str, Any]]] = None
+
+
+class Others(BaseModel):
+    name: Optional[str] = None
+
+
+class Clone(Thing):
+    others: Optional[Others] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/models.py 0.45.0-1/tests/data/expected/main/openapi/use_standard_collections/models.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/models.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_standard_collections/models.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, Optional, Union
+
+from pydantic import BaseModel
+
+
+class Species(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    species: Optional[Species] = None
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Event(BaseModel):
+    name: Optional[Union[str, float, int, bool, dict[str, Any], list[str]]] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/nested/__init__.py 0.45.0-1/tests/data/expected/main/openapi/use_standard_collections/nested/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/nested/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_standard_collections/nested/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/nested/foo.py 0.45.0-1/tests/data/expected/main/openapi/use_standard_collections/nested/foo.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/nested/foo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_standard_collections/nested/foo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,9 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from .._internal import List
+from .._internal import Tea_1 as Tea
+from .._internal import TeaClone
+
+__all__ = ["List", "Tea", "TeaClone"]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/woo/__init__.py 0.45.0-1/tests/data/expected/main/openapi/use_standard_collections/woo/__init__.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/woo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_standard_collections/woo/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/woo/boo.py 0.45.0-1/tests/data/expected/main/openapi/use_standard_collections/woo/boo.py
--- 0.26.4-3/tests/data/expected/main/openapi/use_standard_collections/woo/boo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/use_standard_collections/woo/boo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import bar
+from .._internal import Cocoa, Source
+
+
+class Chocolate(BaseModel):
+    flavour: Optional[str] = None
+    source: Optional[Source] = None
+    cocoa: Optional[Cocoa] = None
+    field: Optional[bar.FieldModel] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/validation.py 0.45.0-1/tests/data/expected/main/openapi/validation.py
--- 0.26.4-3/tests/data/expected/main/openapi/validation.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/validation.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/webhooks_ref_with_external_schema.py 0.45.0-1/tests/data/expected/main/openapi/webhooks_ref_with_external_schema.py
--- 0.26.4-3/tests/data/expected/main/openapi/webhooks_ref_with_external_schema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/webhooks_ref_with_external_schema.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  openapi.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/with_aliases.py 0.45.0-1/tests/data/expected/main/openapi/with_aliases.py
--- 0.26.4-3/tests/data/expected/main/openapi/with_aliases.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/with_aliases.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id_: int = Field(..., alias='id')
+    name_: str = Field(..., alias='name')
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id_: int = Field(..., alias='id')
+    name_: str = Field(..., alias='name')
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name_: Optional[str] = Field(None, alias='name')
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/with_aliases_msgspec.py 0.45.0-1/tests/data/expected/main/openapi/with_aliases_msgspec.py
--- 0.26.4-3/tests/data/expected/main/openapi/with_aliases_msgspec.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/with_aliases_msgspec.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Annotated, List, Union
+
+from msgspec import UNSET, Meta, Struct, UnsetType, field
+from typing_extensions import TypeAlias
+
+
+class Pet(Struct):
+    id_: int = field(name='id')
+    name_: str = field(name='name')
+    tag: Union[str, UnsetType] = UNSET
+
+
+Pets: TypeAlias = List[Pet]
+
+
+class User(Struct):
+    id_: int = field(name='id')
+    name_: str = field(name='name')
+    tag: Union[str, UnsetType] = UNSET
+
+
+Users: TypeAlias = List[User]
+
+
+Id: TypeAlias = str
+
+
+Rules: TypeAlias = List[str]
+
+
+class Error(Struct):
+    code: int
+    message: str
+
+
+class Api(Struct):
+    apiKey: Union[
+        Annotated[str, Meta(description='To be used as a dataset parameter value')],
+        UnsetType,
+    ] = UNSET
+    apiVersionNumber: Union[
+        Annotated[str, Meta(description='To be used as a version parameter value')],
+        UnsetType,
+    ] = UNSET
+    apiUrl: Union[
+        Annotated[str, Meta(description="The URL describing the dataset's fields")],
+        UnsetType,
+    ] = UNSET
+    apiDocumentationUrl: Union[
+        Annotated[str, Meta(description='A URL to the API console for each API')],
+        UnsetType,
+    ] = UNSET
+
+
+Apis: TypeAlias = List[Api]
+
+
+class Event(Struct):
+    name_: Union[str, UnsetType] = field(name='name', default=UNSET)
+
+
+class Result(Struct):
+    event: Union[Event, UnsetType] = UNSET
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/with_exclusive.py 0.45.0-1/tests/data/expected/main/openapi/with_exclusive.py
--- 0.26.4-3/tests/data/expected/main/openapi/with_exclusive.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/with_exclusive.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  exclusive.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, conint
+
+
+class MaximumProblem(BaseModel):
+    status: Optional[conint(ge=100, lt=600)] = None
+
+
+class MinimumProblem(BaseModel):
+    status: Optional[conint(le=600, gt=100)] = None
+
+
+class MinimumMaximumProblem(BaseModel):
+    status: Optional[conint(lt=600, gt=100)] = None
+
+
+class Problem(BaseModel):
+    status: Optional[conint(ge=100, le=600)] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/with_field_constraints.py 0.45.0-1/tests/data/expected/main/openapi/with_field_constraints.py
--- 0.26.4-3/tests/data/expected/main/openapi/with_field_constraints.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/with_field_constraints.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,88 @@
+# generated by datamodel-codegen:
+#   filename:  api_constrained.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional, Union
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int = Field(..., ge=0, le=9223372036854775807)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+
+
+class Pets(BaseModel):
+    __root__: List[Pet] = Field(..., max_items=10, min_items=1, unique_items=True)
+
+
+class UID(BaseModel):
+    __root__: int = Field(..., ge=0)
+
+
+class Phone(BaseModel):
+    __root__: str = Field(..., min_length=3)
+
+
+class FaxItem(BaseModel):
+    __root__: str = Field(..., min_length=3)
+
+
+class User(BaseModel):
+    id: int = Field(..., ge=0)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+    uid: UID
+    phones: Optional[List[Phone]] = Field(None, max_items=10)
+    fax: Optional[List[FaxItem]] = None
+    height: Optional[Union[int, float]] = Field(None, ge=1.0, le=300.0)
+    weight: Optional[Union[float, int]] = Field(None, ge=1.0, le=1000.0)
+    age: Optional[int] = Field(None, gt=0, le=200)
+    rating: Optional[float] = Field(None, gt=0.0, le=5.0)
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2.py 0.45.0-1/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,88 @@
+# generated by datamodel-codegen:
+#   filename:  api_constrained.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional, Union
+
+from pydantic import AnyUrl, BaseModel, Field, RootModel
+
+
+class Pet(BaseModel):
+    id: int = Field(..., ge=0, le=9223372036854775807)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+
+
+class Pets(RootModel[List[Pet]]):
+    root: List[Pet] = Field(..., max_length=10, min_length=1)
+
+
+class UID(RootModel[int]):
+    root: int = Field(..., ge=0)
+
+
+class Phone(RootModel[str]):
+    root: str = Field(..., min_length=3)
+
+
+class FaxItem(RootModel[str]):
+    root: str = Field(..., min_length=3)
+
+
+class User(BaseModel):
+    id: int = Field(..., ge=0)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+    uid: UID
+    phones: Optional[List[Phone]] = Field(None, max_length=10)
+    fax: Optional[List[FaxItem]] = None
+    height: Optional[Union[int, float]] = Field(None, ge=1.0, le=300.0)
+    weight: Optional[Union[float, int]] = Field(None, ge=1.0, le=1000.0)
+    age: Optional[int] = Field(None, gt=0, le=200)
+    rating: Optional[float] = Field(None, gt=0.0, le=5.0)
+
+
+class Users(RootModel[List[User]]):
+    root: List[User]
+
+
+class Id(RootModel[str]):
+    root: str
+
+
+class Rules(RootModel[List[str]]):
+    root: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(RootModel[List[Api]]):
+    root: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_generic_container_types.py 0.45.0-1/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_generic_container_types.py
--- 0.26.4-3/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_generic_container_types.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_generic_container_types.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,88 @@
+# generated by datamodel-codegen:
+#   filename:  api_constrained.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Sequence, Union
+
+from pydantic import AnyUrl, BaseModel, Field, RootModel
+
+
+class Pet(BaseModel):
+    id: int = Field(..., ge=0, le=9223372036854775807)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+
+
+class Pets(RootModel[Sequence[Pet]]):
+    root: Sequence[Pet] = Field(..., max_length=10, min_length=1)
+
+
+class UID(RootModel[int]):
+    root: int = Field(..., ge=0)
+
+
+class Phone(RootModel[str]):
+    root: str = Field(..., min_length=3)
+
+
+class FaxItem(RootModel[str]):
+    root: str = Field(..., min_length=3)
+
+
+class User(BaseModel):
+    id: int = Field(..., ge=0)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+    uid: UID
+    phones: Optional[Sequence[Phone]] = Field(None, max_length=10)
+    fax: Optional[Sequence[FaxItem]] = None
+    height: Optional[Union[int, float]] = Field(None, ge=1.0, le=300.0)
+    weight: Optional[Union[float, int]] = Field(None, ge=1.0, le=1000.0)
+    age: Optional[int] = Field(None, gt=0, le=200)
+    rating: Optional[float] = Field(None, gt=0.0, le=5.0)
+
+
+class Users(RootModel[Sequence[User]]):
+    root: Sequence[User]
+
+
+class Id(RootModel[str]):
+    root: str
+
+
+class Rules(RootModel[Sequence[str]]):
+    root: Sequence[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(RootModel[Sequence[Api]]):
+    root: Sequence[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_generic_container_types_set.py 0.45.0-1/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_generic_container_types_set.py
--- 0.26.4-3/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_generic_container_types_set.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_generic_container_types_set.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,88 @@
+# generated by datamodel-codegen:
+#   filename:  api_constrained.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import FrozenSet, Optional, Sequence, Union
+
+from pydantic import AnyUrl, BaseModel, Field, RootModel
+
+
+class Pet(BaseModel):
+    id: int = Field(..., ge=0, le=9223372036854775807)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+
+
+class Pets(RootModel[FrozenSet[Pet]]):
+    root: FrozenSet[Pet] = Field(..., max_length=10, min_length=1)
+
+
+class UID(RootModel[int]):
+    root: int = Field(..., ge=0)
+
+
+class Phone(RootModel[str]):
+    root: str = Field(..., min_length=3)
+
+
+class FaxItem(RootModel[str]):
+    root: str = Field(..., min_length=3)
+
+
+class User(BaseModel):
+    id: int = Field(..., ge=0)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+    uid: UID
+    phones: Optional[Sequence[Phone]] = Field(None, max_length=10)
+    fax: Optional[Sequence[FaxItem]] = None
+    height: Optional[Union[int, float]] = Field(None, ge=1.0, le=300.0)
+    weight: Optional[Union[float, int]] = Field(None, ge=1.0, le=1000.0)
+    age: Optional[int] = Field(None, gt=0, le=200)
+    rating: Optional[float] = Field(None, gt=0.0, le=5.0)
+
+
+class Users(RootModel[Sequence[User]]):
+    root: Sequence[User]
+
+
+class Id(RootModel[str]):
+    root: str
+
+
+class Rules(RootModel[Sequence[str]]):
+    root: Sequence[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(RootModel[Sequence[Api]]):
+    root: Sequence[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_standard_collections.py 0.45.0-1/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_standard_collections.py
--- 0.26.4-3/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_standard_collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_standard_collections.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,88 @@
+# generated by datamodel-codegen:
+#   filename:  api_constrained.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Union
+
+from pydantic import AnyUrl, BaseModel, Field, RootModel
+
+
+class Pet(BaseModel):
+    id: int = Field(..., ge=0, le=9223372036854775807)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+
+
+class Pets(RootModel[list[Pet]]):
+    root: list[Pet] = Field(..., max_length=10, min_length=1)
+
+
+class UID(RootModel[int]):
+    root: int = Field(..., ge=0)
+
+
+class Phone(RootModel[str]):
+    root: str = Field(..., min_length=3)
+
+
+class FaxItem(RootModel[str]):
+    root: str = Field(..., min_length=3)
+
+
+class User(BaseModel):
+    id: int = Field(..., ge=0)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+    uid: UID
+    phones: Optional[list[Phone]] = Field(None, max_length=10)
+    fax: Optional[list[FaxItem]] = None
+    height: Optional[Union[int, float]] = Field(None, ge=1.0, le=300.0)
+    weight: Optional[Union[float, int]] = Field(None, ge=1.0, le=1000.0)
+    age: Optional[int] = Field(None, gt=0, le=200)
+    rating: Optional[float] = Field(None, gt=0.0, le=5.0)
+
+
+class Users(RootModel[list[User]]):
+    root: list[User]
+
+
+class Id(RootModel[str]):
+    root: str
+
+
+class Rules(RootModel[list[str]]):
+    root: list[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(RootModel[list[Api]]):
+    root: list[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_standard_collections_set.py 0.45.0-1/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_standard_collections_set.py
--- 0.26.4-3/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_standard_collections_set.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/with_field_constraints_pydantic_v2_use_standard_collections_set.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,88 @@
+# generated by datamodel-codegen:
+#   filename:  api_constrained.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional, Union
+
+from pydantic import AnyUrl, BaseModel, Field, RootModel
+
+
+class Pet(BaseModel):
+    id: int = Field(..., ge=0, le=9223372036854775807)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+
+
+class Pets(RootModel[set[Pet]]):
+    root: set[Pet] = Field(..., max_length=10, min_length=1)
+
+
+class UID(RootModel[int]):
+    root: int = Field(..., ge=0)
+
+
+class Phone(RootModel[str]):
+    root: str = Field(..., min_length=3)
+
+
+class FaxItem(RootModel[str]):
+    root: str = Field(..., min_length=3)
+
+
+class User(BaseModel):
+    id: int = Field(..., ge=0)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+    uid: UID
+    phones: Optional[list[Phone]] = Field(None, max_length=10)
+    fax: Optional[list[FaxItem]] = None
+    height: Optional[Union[int, float]] = Field(None, ge=1.0, le=300.0)
+    weight: Optional[Union[float, int]] = Field(None, ge=1.0, le=1000.0)
+    age: Optional[int] = Field(None, gt=0, le=200)
+    rating: Optional[float] = Field(None, gt=0.0, le=5.0)
+
+
+class Users(RootModel[list[User]]):
+    root: list[User]
+
+
+class Id(RootModel[str]):
+    root: str
+
+
+class Rules(RootModel[list[str]]):
+    root: list[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(RootModel[list[Api]]):
+    root: list[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/with_field_constraints_use_unique_items_as_set.py 0.45.0-1/tests/data/expected/main/openapi/with_field_constraints_use_unique_items_as_set.py
--- 0.26.4-3/tests/data/expected/main/openapi/with_field_constraints_use_unique_items_as_set.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/with_field_constraints_use_unique_items_as_set.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,88 @@
+# generated by datamodel-codegen:
+#   filename:  api_constrained.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional, Set, Union
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int = Field(..., ge=0, le=9223372036854775807)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+
+
+class Pets(BaseModel):
+    __root__: Set[Pet] = Field(..., max_items=10, min_items=1, unique_items=True)
+
+
+class UID(BaseModel):
+    __root__: int = Field(..., ge=0)
+
+
+class Phone(BaseModel):
+    __root__: str = Field(..., min_length=3)
+
+
+class FaxItem(BaseModel):
+    __root__: str = Field(..., min_length=3)
+
+
+class User(BaseModel):
+    id: int = Field(..., ge=0)
+    name: str = Field(..., max_length=256)
+    tag: Optional[str] = Field(None, max_length=64)
+    uid: UID
+    phones: Optional[List[Phone]] = Field(None, max_items=10)
+    fax: Optional[List[FaxItem]] = None
+    height: Optional[Union[int, float]] = Field(None, ge=1.0, le=300.0)
+    weight: Optional[Union[float, int]] = Field(None, ge=1.0, le=1000.0)
+    age: Optional[int] = Field(None, gt=0, le=200)
+    rating: Optional[float] = Field(None, gt=0.0, le=5.0)
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/with_snake_case_field.py 0.45.0-1/tests/data/expected/main/openapi/with_snake_case_field.py
--- 0.26.4-3/tests/data/expected/main/openapi/with_snake_case_field.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/with_snake_case_field.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,73 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    api_key: Optional[str] = Field(
+        None, alias='apiKey', description='To be used as a dataset parameter value'
+    )
+    api_version_number: Optional[str] = Field(
+        None,
+        alias='apiVersionNumber',
+        description='To be used as a version parameter value',
+    )
+    api_url: Optional[AnyUrl] = Field(
+        None, alias='apiUrl', description="The URL describing the dataset's fields"
+    )
+    api_documentation_url: Optional[AnyUrl] = Field(
+        None,
+        alias='apiDocumentationUrl',
+        description='A URL to the API console for each API',
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/with_strip_default_none.py 0.45.0-1/tests/data/expected/main/openapi/with_strip_default_none.py
--- 0.26.4-3/tests/data/expected/main/openapi/with_strip_default_none.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/with_strip_default_none.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str]
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str]
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str]
+
+
+class Result(BaseModel):
+    event: Optional[Event]
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/without_field_constraints.py 0.45.0-1/tests/data/expected/main/openapi/without_field_constraints.py
--- 0.26.4-3/tests/data/expected/main/openapi/without_field_constraints.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/without_field_constraints.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,84 @@
+# generated by datamodel-codegen:
+#   filename:  api_constrained.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional, Union
+
+from pydantic import AnyUrl, BaseModel, Field, confloat, conint, constr
+
+
+class Pet(BaseModel):
+    id: conint(ge=0, le=9223372036854775807)
+    name: constr(max_length=256)
+    tag: Optional[constr(max_length=64)] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet] = Field(..., max_items=10, min_items=1, unique_items=True)
+
+
+class UID(BaseModel):
+    __root__: conint(ge=0)
+
+
+class Phone(BaseModel):
+    __root__: constr(min_length=3)
+
+
+class User(BaseModel):
+    id: conint(ge=0)
+    name: constr(max_length=256)
+    tag: Optional[constr(max_length=64)] = None
+    uid: UID
+    phones: Optional[List[Phone]] = Field(None, max_items=10)
+    fax: Optional[List[constr(min_length=3)]] = None
+    height: Optional[Union[conint(ge=1, le=300), confloat(ge=1.0, le=300.0)]] = None
+    weight: Optional[Union[confloat(ge=1.0, le=1000.0), conint(ge=1, le=1000)]] = None
+    age: Optional[conint(le=200, gt=0)] = None
+    rating: Optional[confloat(le=5.0, gt=0.0)] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/without_field_constraints_pydantic_v2.py 0.45.0-1/tests/data/expected/main/openapi/without_field_constraints_pydantic_v2.py
--- 0.26.4-3/tests/data/expected/main/openapi/without_field_constraints_pydantic_v2.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/without_field_constraints_pydantic_v2.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,84 @@
+# generated by datamodel-codegen:
+#   filename:  api_constrained.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional, Union
+
+from pydantic import AnyUrl, BaseModel, Field, RootModel, confloat, conint, constr
+
+
+class Pet(BaseModel):
+    id: conint(ge=0, le=9223372036854775807)
+    name: constr(max_length=256)
+    tag: Optional[constr(max_length=64)] = None
+
+
+class Pets(RootModel[List[Pet]]):
+    root: List[Pet] = Field(..., max_length=10, min_length=1)
+
+
+class UID(RootModel[conint(ge=0)]):
+    root: conint(ge=0)
+
+
+class Phone(RootModel[constr(min_length=3)]):
+    root: constr(min_length=3)
+
+
+class User(BaseModel):
+    id: conint(ge=0)
+    name: constr(max_length=256)
+    tag: Optional[constr(max_length=64)] = None
+    uid: UID
+    phones: Optional[List[Phone]] = Field(None, max_length=10)
+    fax: Optional[List[constr(min_length=3)]] = None
+    height: Optional[Union[conint(ge=1, le=300), confloat(ge=1.0, le=300.0)]] = None
+    weight: Optional[Union[confloat(ge=1.0, le=1000.0), conint(ge=1, le=1000)]] = None
+    age: Optional[conint(le=200, gt=0)] = None
+    rating: Optional[confloat(le=5.0, gt=0.0)] = None
+
+
+class Users(RootModel[List[User]]):
+    root: List[User]
+
+
+class Id(RootModel[str]):
+    root: str
+
+
+class Rules(RootModel[List[str]]):
+    root: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(RootModel[List[Api]]):
+    root: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main/openapi/x_enum_names.py 0.45.0-1/tests/data/expected/main/openapi/x_enum_names.py
--- 0.26.4-3/tests/data/expected/main/openapi/x_enum_names.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/openapi/x_enum_names.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  x_enum_names.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum, IntEnum
+
+
+class CustomerColor(IntEnum):
+    BloodOrange = 1
+    Sunflower = 2
+    LightGreen = 3
+    SkyBlue = 4
+    Purple = 5
+
+
+class StringStatus(Enum):
+    Pending = 'pending'
+    Active = 'active'
+    Closed = 'closed'
+
+
+class PriorityTest(IntEnum):
+    VarnameOne = 1
+    VarnameTwo = 2
+
+
+class ShortNames(IntEnum):
+    First = 1
+    Second = 2
+    integer_3 = 3
+    integer_4 = 4
diff -pruN 0.26.4-3/tests/data/expected/main/person.py 0.45.0-1/tests/data/expected/main/person.py
--- 0.26.4-3/tests/data/expected/main/person.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/person.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,18 @@
+# generated by datamodel-codegen:
+#   filename:  person.json
+
+from __future__ import annotations
+
+from typing import Any, List, Optional
+
+from pydantic import BaseModel, Field, conint
+
+
+class Person(BaseModel):
+    firstName: Optional[str] = Field(None, description="The person's first name.")
+    lastName: Optional[str] = Field(None, description="The person's last name.")
+    age: Optional[conint(ge=0)] = Field(
+        None, description='Age in years which must be equal to or greater than zero.'
+    )
+    friends: Optional[List[Any]] = None
+    comment: None = None
diff -pruN 0.26.4-3/tests/data/expected/main/skip_root_model.py 0.45.0-1/tests/data/expected/main/skip_root_model.py
--- 0.26.4-3/tests/data/expected/main/skip_root_model.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/skip_root_model.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  skip_root_model_test.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Person(BaseModel):
+    name: str
+    age: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/main/space_and_special_characters_dict.py 0.45.0-1/tests/data/expected/main/space_and_special_characters_dict.py
--- 0.26.4-3/tests/data/expected/main/space_and_special_characters_dict.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/space_and_special_characters_dict.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,46 @@
+# generated by datamodel-codegen:
+#   filename:  space_and_special_characters_dict.py
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class InitialParameters(BaseModel):
+    V1: int
+    V2: int
+
+
+class Data(BaseModel):
+    Length__m_: float = Field(..., alias='Length (m)')
+    Symmetric_deviation____: float = Field(..., alias='Symmetric deviation (%)')
+    Total_running_time__s_: int = Field(..., alias='Total running time (s)')
+    Mass__kg_: float = Field(..., alias='Mass (kg)')
+    Initial_parameters: InitialParameters = Field(..., alias='Initial parameters')
+    class_: str = Field(..., alias='class')
+
+
+class Values(BaseModel):
+    field_1_Step: str = Field(..., alias='1 Step')
+    field_2_Step: str = Field(..., alias='2 Step')
+
+
+class Recursive1(BaseModel):
+    value: float
+
+
+class Sub(BaseModel):
+    recursive: Recursive1
+
+
+class Recursive(BaseModel):
+    sub: Sub
+
+
+class Model(BaseModel):
+    Serial_Number: str = Field(..., alias='Serial Number')
+    Timestamp: str
+    Data: Data
+    values: Values
+    recursive: Recursive
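Note: keys containing spaces, parentheses, or '%' are rewritten to valid Python identifiers and reconnected to the original JSON keys via `alias`, so input data keeps its original spelling. A sketch using the pydantic v1 spelling `parse_obj` (under v2 the equivalent is `model_validate`):

    raw = {
        'Serial Number': 'SN-1',
        'Timestamp': '2024-01-01T00:00:00Z',
        'Data': {
            'Length (m)': 1.5,
            'Symmetric deviation (%)': 0.1,
            'Total running time (s)': 60,
            'Mass (kg)': 2.0,
            'Initial parameters': {'V1': 1, 'V2': 2},
            'class': 'A',
        },
        'values': {'1 Step': 'a', '2 Step': 'b'},
        'recursive': {'sub': {'recursive': {'value': 0.0}}},
    }
    model = Model.parse_obj(raw)
    assert model.Data.Length__m_ == 1.5
    assert model.Data.class_ == 'A'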
diff -pruN 0.26.4-3/tests/data/expected/main/use_attribute_docstrings.py 0.45.0-1/tests/data/expected/main/use_attribute_docstrings.py
--- 0.26.4-3/tests/data/expected/main/use_attribute_docstrings.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/use_attribute_docstrings.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,23 @@
+# generated by datamodel-codegen:
+#   filename:  use_attribute_docstrings_test.json
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, ConfigDict
+
+
+class Person(BaseModel):
+    model_config = ConfigDict(
+        use_attribute_docstrings=True,
+    )
+    name: str
+    """
+    The person's full name
+    """
+    age: Optional[int] = None
+    """
+    The person's age in years
+    """
diff -pruN 0.26.4-3/tests/data/expected/main/use_specialized_enum_py311.py 0.45.0-1/tests/data/expected/main/use_specialized_enum_py311.py
--- 0.26.4-3/tests/data/expected/main/use_specialized_enum_py311.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/use_specialized_enum_py311.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  string_enum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import StrEnum
+
+
+class Model(StrEnum):
+    A = 'A'
+    B = 'B'
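Note: `StrEnum` only exists on Python 3.11+, which is why this fixture carries the `_py311` suffix. Its members are real `str` instances, so they compare and format as plain strings:

    assert Model.A == 'A'
    assert isinstance(Model.A, str)
    assert f'{Model.A}' == 'A'   # a plain Enum would render as 'Model.A'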
diff -pruN 0.26.4-3/tests/data/expected/main/yaml.py 0.45.0-1/tests/data/expected/main/yaml.py
--- 0.26.4-3/tests/data/expected/main/yaml.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main/yaml.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  pet.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    name: str
+    age: int
+
+
+class Model(BaseModel):
+    Pet: Pet
diff -pruN 0.26.4-3/tests/data/expected/main_kr/custom_file_header/with_option.py 0.45.0-1/tests/data/expected/main_kr/custom_file_header/with_option.py
--- 0.26.4-3/tests/data/expected/main_kr/custom_file_header/with_option.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/custom_file_header/with_option.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# Copyright 2024 MyCompany
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class Person(BaseModel):
+    first_name: str = Field(..., alias='first-name')
+    last_name: str = Field(..., alias='last-name')
+    email_address: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/custom_file_header/without_option.py 0.45.0-1/tests/data/expected/main_kr/custom_file_header/without_option.py
--- 0.26.4-3/tests/data/expected/main_kr/custom_file_header/without_option.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/custom_file_header/without_option.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  no_alias.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class Person(BaseModel):
+    first_name: str = Field(..., alias='first-name')
+    last_name: str = Field(..., alias='last-name')
+    email_address: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/custom_formatters_kwargs/output.py 0.45.0-1/tests/data/expected/main_kr/custom_formatters_kwargs/output.py
--- 0.26.4-3/tests/data/expected/main_kr/custom_formatters_kwargs/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/custom_formatters_kwargs/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  pet_simple.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    id: Optional[int] = None
+    name: Optional[str] = None
+    tag: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/encoding/output.py 0.45.0-1/tests/data/expected/main_kr/encoding/output.py
--- 0.26.4-3/tests/data/expected/main_kr/encoding/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/encoding/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+# generated by datamodel-codegen:
+#   filename:  encoding_test.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class 日本語Model(BaseModel):
+    名前: Optional[str] = Field(None, description='ユーザー名')
+    年齢: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/formatters/output.py 0.45.0-1/tests/data/expected/main_kr/formatters/output.py
--- 0.26.4-3/tests/data/expected/main_kr/formatters/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/formatters/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  pet_simple.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    id: Optional[int] = None
+    name: Optional[str] = None
+    tag: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/generate_cli_command/basic.txt 0.45.0-1/tests/data/expected/main_kr/generate_cli_command/basic.txt
--- 0.26.4-3/tests/data/expected/main_kr/generate_cli_command/basic.txt	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/generate_cli_command/basic.txt	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,2 @@
+datamodel-codegen --input schema.yaml --output model.py
+
diff -pruN 0.26.4-3/tests/data/expected/main_kr/generate_cli_command/boolean_options.txt 0.45.0-1/tests/data/expected/main_kr/generate_cli_command/boolean_options.txt
--- 0.26.4-3/tests/data/expected/main_kr/generate_cli_command/boolean_options.txt	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/generate_cli_command/boolean_options.txt	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,2 @@
+datamodel-codegen --collapse-root-models --snake-case-field --use-annotated
+
diff -pruN 0.26.4-3/tests/data/expected/main_kr/generate_cli_command/excluded_options.txt 0.45.0-1/tests/data/expected/main_kr/generate_cli_command/excluded_options.txt
--- 0.26.4-3/tests/data/expected/main_kr/generate_cli_command/excluded_options.txt	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/generate_cli_command/excluded_options.txt	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,2 @@
+datamodel-codegen --input schema.yaml
+
diff -pruN 0.26.4-3/tests/data/expected/main_kr/generate_cli_command/false_boolean.txt 0.45.0-1/tests/data/expected/main_kr/generate_cli_command/false_boolean.txt
--- 0.26.4-3/tests/data/expected/main_kr/generate_cli_command/false_boolean.txt	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/generate_cli_command/false_boolean.txt	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,2 @@
+datamodel-codegen --input schema.yaml
+
diff -pruN 0.26.4-3/tests/data/expected/main_kr/generate_cli_command/list_options.txt 0.45.0-1/tests/data/expected/main_kr/generate_cli_command/list_options.txt
--- 0.26.4-3/tests/data/expected/main_kr/generate_cli_command/list_options.txt	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/generate_cli_command/list_options.txt	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,2 @@
+datamodel-codegen --strict-types str int
+
diff -pruN 0.26.4-3/tests/data/expected/main_kr/generate_cli_command/multiple_options.txt 0.45.0-1/tests/data/expected/main_kr/generate_cli_command/multiple_options.txt
--- 0.26.4-3/tests/data/expected/main_kr/generate_cli_command/multiple_options.txt	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/generate_cli_command/multiple_options.txt	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,2 @@
+datamodel-codegen --input schema.yaml --output model.py --output-model-type pydantic_v2.BaseModel --snake-case-field --strict-types str bytes --target-python-version 3.11
+
diff -pruN 0.26.4-3/tests/data/expected/main_kr/generate_cli_command/no_use_specialized_enum.txt 0.45.0-1/tests/data/expected/main_kr/generate_cli_command/no_use_specialized_enum.txt
--- 0.26.4-3/tests/data/expected/main_kr/generate_cli_command/no_use_specialized_enum.txt	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/generate_cli_command/no_use_specialized_enum.txt	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,2 @@
+datamodel-codegen --input schema.yaml --no-use-specialized-enum
+
diff -pruN 0.26.4-3/tests/data/expected/main_kr/generate_cli_command/spaces_in_values.txt 0.45.0-1/tests/data/expected/main_kr/generate_cli_command/spaces_in_values.txt
--- 0.26.4-3/tests/data/expected/main_kr/generate_cli_command/spaces_in_values.txt	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/generate_cli_command/spaces_in_values.txt	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,2 @@
+datamodel-codegen --http-headers "Authorization: Bearer token" "X-Custom: value" --input "my schema.yaml" --output "my model.py"
+
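Note: across these generate_cli_command fixtures, values are emitted bare unless they contain whitespace, in which case they are double-quoted ('my schema.yaml', the HTTP header values). A sketch of that rule as inferred from the fixtures, not necessarily the tool's actual implementation:

    def quote_arg(value: str) -> str:
        # double-quote only when the value contains whitespace
        return f'"{value}"' if any(ch.isspace() for ch in value) else value

    print(quote_arg('schema.yaml'))     # schema.yaml
    print(quote_arg('my schema.yaml'))  # "my schema.yaml"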
diff -pruN 0.26.4-3/tests/data/expected/main_kr/generate_pyproject_config/basic.txt 0.45.0-1/tests/data/expected/main_kr/generate_pyproject_config/basic.txt
--- 0.26.4-3/tests/data/expected/main_kr/generate_pyproject_config/basic.txt	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/generate_pyproject_config/basic.txt	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,4 @@
+[tool.datamodel-codegen]
+input = "schema.yaml"
+output = "model.py"
+
diff -pruN 0.26.4-3/tests/data/expected/main_kr/generate_pyproject_config/boolean_options.txt 0.45.0-1/tests/data/expected/main_kr/generate_pyproject_config/boolean_options.txt
--- 0.26.4-3/tests/data/expected/main_kr/generate_pyproject_config/boolean_options.txt	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/generate_pyproject_config/boolean_options.txt	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,5 @@
+[tool.datamodel-codegen]
+collapse-root-models = true
+snake-case-field = true
+use-annotated = true
+
diff -pruN 0.26.4-3/tests/data/expected/main_kr/generate_pyproject_config/enum_option.txt 0.45.0-1/tests/data/expected/main_kr/generate_pyproject_config/enum_option.txt
--- 0.26.4-3/tests/data/expected/main_kr/generate_pyproject_config/enum_option.txt	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/generate_pyproject_config/enum_option.txt	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,4 @@
+[tool.datamodel-codegen]
+input = "schema.yaml"
+read-only-write-only-model-type = "all"
+
diff -pruN 0.26.4-3/tests/data/expected/main_kr/generate_pyproject_config/excludes_meta_options.txt 0.45.0-1/tests/data/expected/main_kr/generate_pyproject_config/excludes_meta_options.txt
--- 0.26.4-3/tests/data/expected/main_kr/generate_pyproject_config/excludes_meta_options.txt	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/generate_pyproject_config/excludes_meta_options.txt	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+[tool.datamodel-codegen]
+input = "schema.yaml"
+
diff -pruN 0.26.4-3/tests/data/expected/main_kr/generate_pyproject_config/list_options.txt 0.45.0-1/tests/data/expected/main_kr/generate_pyproject_config/list_options.txt
--- 0.26.4-3/tests/data/expected/main_kr/generate_pyproject_config/list_options.txt	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/generate_pyproject_config/list_options.txt	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+[tool.datamodel-codegen]
+strict-types = ["str", "int"]
+
diff -pruN 0.26.4-3/tests/data/expected/main_kr/generate_pyproject_config/multiple_options.txt 0.45.0-1/tests/data/expected/main_kr/generate_pyproject_config/multiple_options.txt
--- 0.26.4-3/tests/data/expected/main_kr/generate_pyproject_config/multiple_options.txt	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/generate_pyproject_config/multiple_options.txt	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,8 @@
+[tool.datamodel-codegen]
+input = "schema.yaml"
+output = "model.py"
+output-model-type = "pydantic_v2.BaseModel"
+snake-case-field = true
+strict-types = ["str", "bytes"]
+target-python-version = "3.11"
+
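Note: in the generated `[tool.datamodel-codegen]` tables, option names keep their CLI spelling (dashes, not underscores), booleans become TOML booleans, and repeated values become arrays. Reading one back with the stdlib parser (`tomllib`, Python 3.11+; the `tomli` package offers the same API on older versions):

    import tomllib

    text = '''
    [tool.datamodel-codegen]
    input = "schema.yaml"
    snake-case-field = true
    strict-types = ["str", "bytes"]
    '''
    config = tomllib.loads(text)['tool']['datamodel-codegen']
    assert config['strict-types'] == ['str', 'bytes']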
diff -pruN 0.26.4-3/tests/data/expected/main_kr/ignore_pyproject/output.py 0.45.0-1/tests/data/expected/main_kr/ignore_pyproject/output.py
--- 0.26.4-3/tests/data/expected/main_kr/ignore_pyproject/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/ignore_pyproject/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  schema.json
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    firstName: Optional[str] = None
+    lastName: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/ignore_pyproject/without_option.py 0.45.0-1/tests/data/expected/main_kr/ignore_pyproject/without_option.py
--- 0.26.4-3/tests/data/expected/main_kr/ignore_pyproject/without_option.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/ignore_pyproject/without_option.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  schema.json
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class Model(BaseModel):
+    first_name: Optional[str] = Field(None, alias='firstName')
+    last_name: Optional[str] = Field(None, alias='lastName')
diff -pruN 0.26.4-3/tests/data/expected/main_kr/include_path_parameters/output.py 0.45.0-1/tests/data/expected/main_kr/include_path_parameters/output.py
--- 0.26.4-3/tests/data/expected/main_kr/include_path_parameters/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/include_path_parameters/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+# generated by datamodel-codegen:
+#   filename:  include_path_parameters.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Post(BaseModel):
+    id: Optional[str] = None
+    title: Optional[str] = None
+    content: Optional[str] = None
+
+
+class UsersUserIdPostsPostIdGetParameters(BaseModel):
+    userId: int
+    postId: str
+    includeComments: Optional[bool] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/input_output/output.py 0.45.0-1/tests/data/expected/main_kr/input_output/output.py
--- 0.26.4-3/tests/data/expected/main_kr/input_output/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/input_output/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  pet_simple.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    id: Optional[int] = None
+    name: Optional[str] = None
+    tag: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main/output.py 0.45.0-1/tests/data/expected/main_kr/main/output.py
--- 0.26.4-3/tests/data/expected/main_kr/main/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/main/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_allof_with_description_only/output.py 0.45.0-1/tests/data/expected/main_kr/main_allof_with_description_only/output.py
--- 0.26.4-3/tests/data/expected/main_kr/main_allof_with_description_only/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/main_allof_with_description_only/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,23 @@
+# generated by datamodel-codegen:
+#   filename:  allof_with_description_only.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class MyModel(BaseModel):
+    """
+    A model that has a description.
+    """
+
+    name: Optional[str] = None
+
+
+class MyOtherModel(MyModel):
+    """
+    Another model that should also have a description.
+    """
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_base_class/output.py 0.45.0-1/tests/data/expected/main_kr/main_base_class/output.py
--- 0.26.4-3/tests/data/expected/main_kr/main_base_class/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/main_base_class/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,71 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, Field
+
+from custom_module import Base
+
+
+class Pet(Base):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(Base):
+    __root__: List[Pet]
+
+
+class User(Base):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(Base):
+    __root__: List[User]
+
+
+class Id(Base):
+    __root__: str
+
+
+class Rules(Base):
+    __root__: List[str]
+
+
+class Error(Base):
+    code: int
+    message: str
+
+
+class Api(Base):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(Base):
+    __root__: List[Api]
+
+
+class Event(Base):
+    name: Optional[str] = None
+
+
+class Result(Base):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_custom_template_dir/output.py 0.45.0-1/tests/data/expected/main_kr/main_custom_template_dir/output.py
--- 0.26.4-3/tests/data/expected/main_kr/main_custom_template_dir/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/main_custom_template_dir/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,61 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):  # 1 2, 1 2, this is just a pet
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = None
+    apiVersionNumber: Optional[str] = None
+    apiUrl: Optional[AnyUrl] = None
+    apiDocumentationUrl: Optional[AnyUrl] = None
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_modular/__init__.py 0.45.0-1/tests/data/expected/main_kr/main_modular/__init__.py
--- 0.26.4-3/tests/data/expected/main_kr/main_modular/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/main_modular/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from ._internal import DifferentTea, Error, Id, OptionalModel, Result, Source
+
+__all__ = ["DifferentTea", "Error", "Id", "OptionalModel", "Result", "Source"]
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_modular/_internal.py 0.45.0-1/tests/data/expected/main_kr/main_modular/_internal.py
--- 0.26.4-3/tests/data/expected/main_kr/main_modular/_internal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/main_modular/_internal.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,67 @@
+# generated by datamodel-codegen:
+#   filename:  _internal
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+from . import models
+
+
+class OptionalModel(BaseModel):
+    __root__: str
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Result(BaseModel):
+    event: Optional[models.Event] = None
+
+
+class Source(BaseModel):
+    country: Optional[str] = None
+
+
+class DifferentTea(BaseModel):
+    foo: Optional[Tea] = None
+    nested: Optional[Tea_1] = None
+
+
+class Tea(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+
+
+class Cocoa(BaseModel):
+    quality: Optional[int] = None
+
+
+class Tea_1(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea_1] = None
+    optional: Optional[List[OptionalModel]] = None
+
+
+class TeaClone(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea_1] = None
+    optional: Optional[List[OptionalModel]] = None
+
+
+class ListModel(BaseModel):
+    __root__: List[Tea_1]
+
+
+Tea_1.update_forward_refs()
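Note: `Tea_1` is self-referential (`self: Optional[Tea_1]`), so the module ends with `update_forward_refs()` — the pydantic v1 call that resolves the postponed string annotations once all classes exist (`model_rebuild()` is the v2 counterpart). Nested input then validates recursively:

    # 'self' is usable as a keyword here because pydantic's __init__
    # does not reserve that name for the instance argument.
    tea = Tea_1(flavour='earl grey', self={'flavour': 'assam'})
    assert tea.self.flavour == 'assam'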
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_modular/bar.py 0.45.0-1/tests/data/expected/main_kr/main_modular/bar.py
--- 0.26.4-3/tests/data/expected/main_kr/main_modular/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/main_modular/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from pydantic import BaseModel, Field
+
+
+class FieldModel(BaseModel):
+    __root__: str = Field(..., example='green')
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_modular/collections.py 0.45.0-1/tests/data/expected/main_kr/main_modular/collections.py
--- 0.26.4-3/tests/data/expected/main_kr/main_modular/collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/main_modular/collections.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,51 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+from . import models
+
+
+class Pets(BaseModel):
+    __root__: List[models.Pet]
+
+
+class Users(BaseModel):
+    __root__: List[models.User]
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Stage(Enum):
+    test = 'test'
+    dev = 'dev'
+    stg = 'stg'
+    prod = 'prod'
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+    stage: Optional[Stage] = None
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_modular/foo/__init__.py 0.45.0-1/tests/data/expected/main_kr/main_modular/foo/__init__.py
--- 0.26.4-3/tests/data/expected/main_kr/main_modular/foo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/main_modular/foo/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from .._internal import Cocoa, Tea
+
+__all__ = ["Cocoa", "Tea"]
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_modular/foo/bar.py 0.45.0-1/tests/data/expected/main_kr/main_modular/foo/bar.py
--- 0.26.4-3/tests/data/expected/main_kr/main_modular/foo/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/main_modular/foo/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Dict, List, Optional
+
+from pydantic import BaseModel
+
+
+class Thing(BaseModel):
+    attributes: Optional[Dict[str, Any]] = None
+
+
+class Thang(BaseModel):
+    attributes: Optional[List[Dict[str, Any]]] = None
+
+
+class Others(BaseModel):
+    name: Optional[str] = None
+
+
+class Clone(Thing):
+    others: Optional[Others] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_modular/models.py 0.45.0-1/tests/data/expected/main_kr/main_modular/models.py
--- 0.26.4-3/tests/data/expected/main_kr/main_modular/models.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/main_modular/models.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,33 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, Dict, List, Optional, Union
+
+from pydantic import BaseModel
+
+
+class Species(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    species: Optional[Species] = None
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Event(BaseModel):
+    name: Optional[Union[str, float, int, bool, Dict[str, Any], List[str]]] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_modular/nested/__init__.py 0.45.0-1/tests/data/expected/main_kr/main_modular/nested/__init__.py
--- 0.26.4-3/tests/data/expected/main_kr/main_modular/nested/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/main_modular/nested/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_modular/nested/foo.py 0.45.0-1/tests/data/expected/main_kr/main_modular/nested/foo.py
--- 0.26.4-3/tests/data/expected/main_kr/main_modular/nested/foo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/main_modular/nested/foo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,9 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from .._internal import ListModel
+from .._internal import Tea_1 as Tea
+from .._internal import TeaClone
+
+__all__ = ["ListModel", "Tea", "TeaClone"]
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_modular/woo/__init__.py 0.45.0-1/tests/data/expected/main_kr/main_modular/woo/__init__.py
--- 0.26.4-3/tests/data/expected/main_kr/main_modular/woo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/main_modular/woo/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_modular/woo/boo.py 0.45.0-1/tests/data/expected/main_kr/main_modular/woo/boo.py
--- 0.26.4-3/tests/data/expected/main_kr/main_modular/woo/boo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/main_modular/woo/boo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  modular.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import bar
+from .._internal import Cocoa, Source
+
+
+class Chocolate(BaseModel):
+    flavour: Optional[str] = None
+    source: Optional[Source] = None
+    cocoa: Optional[Cocoa] = None
+    field: Optional[bar.FieldModel] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_no_file/output.py 0.45.0-1/tests/data/expected/main_kr/main_no_file/output.py
--- 0.26.4-3/tests/data/expected/main_kr/main_no_file/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/main_no_file/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 1985-10-26T08:21:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_use_field_description/output.py 0.45.0-1/tests/data/expected/main_kr/main_use_field_description/output.py
--- 0.26.4-3/tests/data/expected/main_kr/main_use_field_description/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/main_use_field_description/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,74 @@
+# generated by datamodel-codegen:
+#   filename:  api_multiline_docstrings.yaml
+#   timestamp: 2022-11-11T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = None
+    """
+    To be used as a dataset parameter value.
+    Now also with multi-line docstrings.
+    """
+    apiVersionNumber: Optional[str] = None
+    """
+    To be used as a version parameter value
+    """
+    apiUrl: Optional[AnyUrl] = None
+    """
+    The URL describing the dataset's fields
+    """
+    apiDocumentationUrl: Optional[AnyUrl] = None
+    """
+    A URL to the API console for each API
+    """
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_use_inline_field_description/output.py 0.45.0-1/tests/data/expected/main_kr/main_use_inline_field_description/output.py
--- 0.26.4-3/tests/data/expected/main_kr/main_use_inline_field_description/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/main_use_inline_field_description/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,80 @@
+# generated by datamodel-codegen:
+#   filename:  api_multiline_docstrings.yaml
+#   timestamp: 2022-11-11T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None,
+        description='To be used as a dataset parameter value.\nNow also with multi-line docstrings.',
+    )
+    """
+    To be used as a dataset parameter value.
+    Now also with multi-line docstrings.
+    """
+
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    """To be used as a version parameter value"""
+
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    """The URL describing the dataset's fields"""
+
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+    """A URL to the API console for each API"""
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/main_use_schema_description/output.py 0.45.0-1/tests/data/expected/main_kr/main_use_schema_description/output.py
--- 0.26.4-3/tests/data/expected/main_kr/main_use_schema_description/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/main_use_schema_description/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,79 @@
+# generated by datamodel-codegen:
+#   filename:  api_multiline_docstrings.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    """
+    error result.
+    Now with multi-line docstrings.
+    """
+
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None,
+        description='To be used as a dataset parameter value.\nNow also with multi-line docstrings.',
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    """
+    Event object
+    """
+
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/no_alias/with_option.py 0.45.0-1/tests/data/expected/main_kr/no_alias/with_option.py
--- 0.26.4-3/tests/data/expected/main_kr/no_alias/with_option.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/no_alias/with_option.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  no_alias.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Person(BaseModel):
+    first_name: str
+    last_name: str
+    email_address: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/no_alias/without_option.py 0.45.0-1/tests/data/expected/main_kr/no_alias/without_option.py
--- 0.26.4-3/tests/data/expected/main_kr/no_alias/without_option.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/no_alias/without_option.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  no_alias.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class Person(BaseModel):
+    first_name: str = Field(..., alias='first-name')
+    last_name: str = Field(..., alias='last-name')
+    email_address: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/pyproject/output.py 0.45.0-1/tests/data/expected/main_kr/pyproject/output.py
--- 0.26.4-3/tests/data/expected/main_kr/pyproject/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/pyproject/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,92 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import (
+    annotations,
+)
+
+from typing import (
+    List,
+    Optional,
+)
+
+from pydantic import (
+    AnyUrl,
+    BaseModel,
+    Field,
+)
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[
+        str
+    ] = Field(
+        None,
+        description="To be used as a dataset parameter value",
+    )
+    apiVersionNumber: Optional[
+        str
+    ] = Field(
+        None,
+        description="To be used as a version parameter value",
+    )
+    apiUrl: Optional[
+        AnyUrl
+    ] = Field(
+        None,
+        description="The URL describing the dataset's fields",
+    )
+    apiDocumentationUrl: Optional[
+        AnyUrl
+    ] = Field(
+        None,
+        description="A URL to the API console for each API",
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[
+        Event
+    ] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/pyproject/output.strictstr.py 0.45.0-1/tests/data/expected/main_kr/pyproject/output.strictstr.py
--- 0.26.4-3/tests/data/expected/main_kr/pyproject/output.strictstr.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/pyproject/output.strictstr.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field, StrictStr
+
+
+class Pet(BaseModel):
+    id: int
+    name: StrictStr
+    tag: Optional[StrictStr] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: StrictStr
+    tag: Optional[StrictStr] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: StrictStr
+
+
+class Rules(BaseModel):
+    __root__: List[StrictStr]
+
+
+class Error(BaseModel):
+    code: int
+    message: StrictStr
+
+
+class Api(BaseModel):
+    apiKey: Optional[StrictStr] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[StrictStr] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[StrictStr] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/pyproject_profile/cli_override.py 0.45.0-1/tests/data/expected/main_kr/pyproject_profile/cli_override.py
--- 0.26.4-3/tests/data/expected/main_kr/pyproject_profile/cli_override.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/pyproject_profile/cli_override.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+# generated by datamodel-codegen:
+#   filename:  schema.json
+
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    firstName: str | None = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/pyproject_profile/ignore_pyproject.py 0.45.0-1/tests/data/expected/main_kr/pyproject_profile/ignore_pyproject.py
--- 0.26.4-3/tests/data/expected/main_kr/pyproject_profile/ignore_pyproject.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/pyproject_profile/ignore_pyproject.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  schema.json
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    firstName: Optional[str] = None
+    lastName: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/pyproject_profile/inherits_base.py 0.45.0-1/tests/data/expected/main_kr/pyproject_profile/inherits_base.py
--- 0.26.4-3/tests/data/expected/main_kr/pyproject_profile/inherits_base.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/pyproject_profile/inherits_base.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  schema.json
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class Model(BaseModel):
+    first_name: Optional[str] = Field(None, alias='firstName')
diff -pruN 0.26.4-3/tests/data/expected/main_kr/pyproject_profile/shallow_merge.py 0.45.0-1/tests/data/expected/main_kr/pyproject_profile/shallow_merge.py
--- 0.26.4-3/tests/data/expected/main_kr/pyproject_profile/shallow_merge.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/pyproject_profile/shallow_merge.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+# generated by datamodel-codegen:
+#   filename:  schema.json
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, StrictBytes
+
+
+class Model(BaseModel):
+    data: Optional[StrictBytes] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/pyproject_profile/with_profile.py 0.45.0-1/tests/data/expected/main_kr/pyproject_profile/with_profile.py
--- 0.26.4-3/tests/data/expected/main_kr/pyproject_profile/with_profile.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/pyproject_profile/with_profile.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+# generated by datamodel-codegen:
+#   filename:  schema.json
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class Model(BaseModel):
+    first_name: Optional[str] = Field(None, alias='firstName')
+    last_name: Optional[str] = Field(None, alias='lastName')
diff -pruN 0.26.4-3/tests/data/expected/main_kr/target_python_version/output.py 0.45.0-1/tests/data/expected/main_kr/target_python_version/output.py
--- 0.26.4-3/tests/data/expected/main_kr/target_python_version/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/target_python_version/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,69 @@
+# generated by datamodel-codegen:
+#   filename:  api.yaml
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/target_python_version/py310.py 0.45.0-1/tests/data/expected/main_kr/target_python_version/py310.py
--- 0.26.4-3/tests/data/expected/main_kr/target_python_version/py310.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/target_python_version/py310.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  person.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any
+
+from pydantic import BaseModel, Field, conint
+
+
+class Person(BaseModel):
+    firstName: str | None = Field(None, description="The person's first name.")
+    lastName: str | None = Field(None, description="The person's last name.")
+    age: conint(ge=0) | None = Field(
+        None, description='Age in years which must be equal to or greater than zero.'
+    )
+    friends: list[Any] | None = None
+    comment: None = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/target_python_version/py39.py 0.45.0-1/tests/data/expected/main_kr/target_python_version/py39.py
--- 0.26.4-3/tests/data/expected/main_kr/target_python_version/py39.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/target_python_version/py39.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# generated by datamodel-codegen:
+#   filename:  person.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Any, Optional
+
+from pydantic import BaseModel, Field, conint
+
+
+class Person(BaseModel):
+    firstName: Optional[str] = Field(None, description="The person's first name.")
+    lastName: Optional[str] = Field(None, description="The person's last name.")
+    age: Optional[conint(ge=0)] = Field(
+        None, description='Age in years which must be equal to or greater than zero.'
+    )
+    friends: Optional[list[Any]] = None
+    comment: None = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/url_with_headers/output.py 0.45.0-1/tests/data/expected/main_kr/url_with_headers/output.py
--- 0.26.4-3/tests/data/expected/main_kr/url_with_headers/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/url_with_headers/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+# generated by datamodel-codegen:
+#   filename:  https://api.example.com/schema.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    id: Optional[int] = None
+    name: Optional[str] = None
+    tag: Optional[str] = None
diff -pruN 0.26.4-3/tests/data/expected/main_kr/use_decimal_for_multiple_of/output.py 0.45.0-1/tests/data/expected/main_kr/use_decimal_for_multiple_of/output.py
--- 0.26.4-3/tests/data/expected/main_kr/use_decimal_for_multiple_of/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/use_decimal_for_multiple_of/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+# generated by datamodel-codegen:
+#   filename:  use_decimal_for_multiple_of.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, condecimal, confloat
+
+
+class Model(BaseModel):
+    price: Optional[condecimal(ge=0, le=99999.99, multiple_of=0.01)] = None
+    quantity: Optional[condecimal(multiple_of=0.1)] = None
+    rate: Optional[condecimal(multiple_of=0.001, lt=1.0, gt=0.0)] = None
+    simple_float: Optional[confloat(ge=0.0, le=100.0)] = None
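Note: checking `multiple_of` against binary floats is noisy (e.g. 19.99 % 0.01 is not exactly 0 in float arithmetic), so this option maps such constraints to `condecimal`, where the multiple-of test is exact:

    from decimal import Decimal

    from pydantic import ValidationError

    m = Model(price=Decimal('19.99'), simple_float=50.0)
    assert m.price == Decimal('19.99')

    try:
        Model(price=Decimal('19.995'))   # not a multiple of 0.01
    except ValidationError:
        print('rejected, as expected')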
diff -pruN 0.26.4-3/tests/data/expected/main_kr/use_non_positive_negative/output.py 0.45.0-1/tests/data/expected/main_kr/use_non_positive_negative/output.py
--- 0.26.4-3/tests/data/expected/main_kr/use_non_positive_negative/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/use_non_positive_negative/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,31 @@
+# generated by datamodel-codegen:
+#   filename:  use_non_positive_negative.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import (
+    BaseModel,
+    Field,
+    NonNegativeFloat,
+    NonNegativeInt,
+    NonPositiveFloat,
+    NonPositiveInt,
+)
+
+
+class NumberConstraints(BaseModel):
+    non_negative_count: Optional[NonNegativeInt] = Field(
+        None, description='A count that cannot be negative'
+    )
+    non_positive_balance: Optional[NonPositiveInt] = Field(
+        None, description='A balance that cannot be positive'
+    )
+    non_negative_amount: Optional[NonNegativeFloat] = Field(
+        None, description='An amount that cannot be negative'
+    )
+    non_positive_score: Optional[NonPositiveFloat] = Field(
+        None, description='A score that cannot be positive'
+    )
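Note: `NonNegativeInt`, `NonPositiveInt`, `NonNegativeFloat`, and `NonPositiveFloat` are pydantic's built-in shorthands for `ge=0` / `le=0` constraints, used here instead of spelling out `conint`/`confloat`:

    from pydantic import ValidationError

    ok = NumberConstraints(non_negative_count=0, non_positive_balance=-5)
    assert ok.non_negative_amount is None

    try:
        NumberConstraints(non_negative_count=-1)   # ge=0 violated
    except ValidationError:
        print('negative count rejected')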
diff -pruN 0.26.4-3/tests/data/expected/main_kr/use_pendulum/output.py 0.45.0-1/tests/data/expected/main_kr/use_pendulum/output.py
--- 0.26.4-3/tests/data/expected/main_kr/use_pendulum/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/main_kr/use_pendulum/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+# generated by datamodel-codegen:
+#   filename:  use_pendulum.json
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Optional
+
+from pendulum import Date, DateTime, Duration
+from pydantic import BaseModel
+
+
+class Event(BaseModel):
+    name: str
+    created_at: DateTime
+    event_date: Optional[Date] = None
+    duration: Optional[Duration] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/graphql/field-default-enum.py 0.45.0-1/tests/data/expected/parser/graphql/field-default-enum.py
--- 0.26.4-3/tests/data/expected/parser/graphql/field-default-enum.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/graphql/field-default-enum.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,34 @@
+# generated by datamodel-codegen:
+#   filename:  field-default-enum.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Literal, Optional
+
+from pydantic import BaseModel, Field
+from typing_extensions import TypeAlias
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class Color(Enum):
+    BLUE = 'BLUE'
+    GREEN = 'GREEN'
+    RED = 'RED'
+
+
+class Car(BaseModel):
+    colorList: Optional[List[Color]] = [Color.RED]
+    colorOne: Optional[Color] = Color.GREEN
+    typename__: Optional[Literal['Car']] = Field('Car', alias='__typename')
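Note: GraphQL field defaults that name an enum value are emitted as member references (`Color.GREEN`, `[Color.RED]`) rather than raw strings, so a freshly constructed instance already carries the members:

    car = Car()
    assert car.colorOne is Color.GREEN
    assert car.colorList == [Color.RED]
    assert car.typename__ == 'Car'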
diff -pruN 0.26.4-3/tests/data/expected/parser/graphql/union-aliased-bug.py 0.45.0-1/tests/data/expected/parser/graphql/union-aliased-bug.py
--- 0.26.4-3/tests/data/expected/parser/graphql/union-aliased-bug.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/graphql/union-aliased-bug.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,46 @@
+# generated by datamodel-codegen:
+#   filename:  union-aliased-bug.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+from typing_extensions import TypeAlias
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class GroupMetadata(BaseModel):
+    name: String
+    typename__: Optional[Literal['GroupMetadata']] = Field(
+        'GroupMetadata', alias='__typename'
+    )
+
+
+class UserMetadata(BaseModel):
+    name: String
+    typename__: Optional[Literal['UserMetadata']] = Field(
+        'UserMetadata', alias='__typename'
+    )
+
+
+Metadata: TypeAlias = Union[
+    'GroupMetadata',
+    'UserMetadata',
+]
+
+
+class Resource(BaseModel):
+    metadata: UserMetadata
+    typename__: Optional[Literal['Resource']] = Field('Resource', alias='__typename')
diff -pruN 0.26.4-3/tests/data/expected/parser/graphql/union-commented.py 0.45.0-1/tests/data/expected/parser/graphql/union-commented.py
--- 0.26.4-3/tests/data/expected/parser/graphql/union-commented.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/graphql/union-commented.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,66 @@
+# generated by datamodel-codegen:
+#   filename:  union-commented.graphql
+#   timestamp: 2019-07-26T00:00:00+00:00
+
+from __future__ import annotations
+
+from typing import Literal, Optional, Union
+
+from pydantic import BaseModel, Field
+from typing_extensions import TypeAlias
+
+Boolean: TypeAlias = bool
+"""
+The `Boolean` scalar type represents `true` or `false`.
+"""
+
+
+String: TypeAlias = str
+"""
+The `String` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text.
+"""
+
+
+class GroupMetadata(BaseModel):
+    """
+    This is a test comment in a single line
+    """
+
+    name: String
+    typename__: Optional[Literal['GroupMetadata']] = Field(
+        'GroupMetadata', alias='__typename'
+    )
+
+
+class UserMetadata(BaseModel):
+    """
+    This is a multiline comment,
+    with a line break,
+    and a line break
+    """
+
+    name: String
+    typename__: Optional[Literal['UserMetadata']] = Field(
+        'UserMetadata', alias='__typename'
+    )
+
+
+# This is a single line comment
+DummyMetadata: TypeAlias = Union[
+    'GroupMetadata',
+    'UserMetadata',
+]
+
+
+# This is another multiline comment,
+# with a line break,
+# and another line break
+Metadata: TypeAlias = Union[
+    'GroupMetadata',
+    'UserMetadata',
+]
+
+
+class Resource(BaseModel):
+    metadata: UserMetadata
+    typename__: Optional[Literal['Resource']] = Field('Resource', alias='__typename')
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse/format.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse/format.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse/format.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse/format.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,50 @@
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = None
+    apiVersionNumber: Optional[str] = None
+    apiUrl: Optional[AnyUrl] = None
+    apiDocumentationUrl: Optional[AnyUrl] = None
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse/with_import.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse/with_import.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse/with_import.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse/with_import.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,55 @@
+from __future__ import annotations
+from typing import List, Optional
+from pydantic import AnyUrl, BaseModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = None
+    apiVersionNumber: Optional[str] = None
+    apiUrl: Optional[AnyUrl] = None
+    apiDocumentationUrl: Optional[AnyUrl] = None
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse/with_import_format.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse/with_import_format.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse/with_import_format.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse/with_import_format.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,57 @@
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = None
+    apiVersionNumber: Optional[str] = None
+    apiUrl: Optional[AnyUrl] = None
+    apiDocumentationUrl: Optional[AnyUrl] = None
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse/with_import_format_custom_module.Base.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse/with_import_format_custom_module.Base.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse/with_import_format_custom_module.Base.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse/with_import_format_custom_module.Base.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,59 @@
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl
+
+from custom_module import Base
+
+
+class Pet(Base):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(Base):
+    __root__: List[Pet]
+
+
+class User(Base):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(Base):
+    __root__: List[User]
+
+
+class Id(Base):
+    __root__: str
+
+
+class Rules(Base):
+    __root__: List[str]
+
+
+class Error(Base):
+    code: int
+    message: str
+
+
+class Api(Base):
+    apiKey: Optional[str] = None
+    apiVersionNumber: Optional[str] = None
+    apiUrl: Optional[AnyUrl] = None
+    apiDocumentationUrl: Optional[AnyUrl] = None
+
+
+class Apis(Base):
+    __root__: List[Api]
+
+
+class Event(Base):
+    name: Optional[str] = None
+
+
+class Result(Base):
+    event: Optional[Event] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/format.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/format.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/format.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/format.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,64 @@
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    code: int
+    message: str
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
+
+
+class Broken(BaseModel):
+    foo: Optional[str] = None
+    bar: Optional[int] = None
+
+
+class BrokenArray(BaseModel):
+    broken: Optional[Dict[str, List[Broken]]] = None
+
+
+class FileSetUpload(BaseModel):
+    task_id: Optional[str] = None
+    tags: Dict[str, List[str]]
+
+
+class Test(BaseModel):
+    broken: Optional[Dict[str, Broken]] = None
+    failing: Optional[Dict[str, str]] = {}
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/with_import.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/with_import.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/with_import.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/with_import.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,67 @@
+from __future__ import annotations
+from typing import Dict, List, Optional
+from pydantic import BaseModel, Extra
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    class Config:
+        extra = Extra.allow
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    class Config:
+        extra = Extra.forbid
+    code: int
+    message: str
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
+
+
+class Broken(BaseModel):
+    foo: Optional[str] = None
+    bar: Optional[int] = None
+
+
+class BrokenArray(BaseModel):
+    broken: Optional[Dict[str, List[Broken]]] = None
+
+
+class FileSetUpload(BaseModel):
+    task_id: Optional[str] = None
+    tags: Dict[str, List[str]]
+
+
+class Test(BaseModel):
+    broken: Optional[Dict[str, Broken]] = None
+    failing: Optional[Dict[str, str]] = {}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/with_import_format.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/with_import_format.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/with_import_format.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/with_import_format.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,71 @@
+from __future__ import annotations
+
+from typing import Dict, List, Optional
+
+from pydantic import BaseModel, Extra
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class User(BaseModel):
+    class Config:
+        extra = Extra.allow
+
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(BaseModel):
+    __root__: List[User]
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Error(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+    code: int
+    message: str
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
+
+
+class Broken(BaseModel):
+    foo: Optional[str] = None
+    bar: Optional[int] = None
+
+
+class BrokenArray(BaseModel):
+    broken: Optional[Dict[str, List[Broken]]] = None
+
+
+class FileSetUpload(BaseModel):
+    task_id: Optional[str] = None
+    tags: Dict[str, List[str]]
+
+
+class Test(BaseModel):
+    broken: Optional[Dict[str, Broken]] = None
+    failing: Optional[Dict[str, str]] = {}
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/with_import_format_custom_module.Base.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/with_import_format_custom_module.Base.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/with_import_format_custom_module.Base.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_additional_properties/with_import_format_custom_module.Base.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,73 @@
+from __future__ import annotations
+
+from typing import Dict, List, Optional
+
+from pydantic import Extra
+
+from custom_module import Base
+
+
+class Pet(Base):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(Base):
+    __root__: List[Pet]
+
+
+class User(Base):
+    class Config:
+        extra = Extra.allow
+
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Users(Base):
+    __root__: List[User]
+
+
+class Id(Base):
+    __root__: str
+
+
+class Rules(Base):
+    __root__: List[str]
+
+
+class Error(Base):
+    class Config:
+        extra = Extra.forbid
+
+    code: int
+    message: str
+
+
+class Event(Base):
+    name: Optional[str] = None
+
+
+class Result(Base):
+    event: Optional[Event] = None
+
+
+class Broken(Base):
+    foo: Optional[str] = None
+    bar: Optional[int] = None
+
+
+class BrokenArray(Base):
+    broken: Optional[Dict[str, List[Broken]]] = None
+
+
+class FileSetUpload(Base):
+    task_id: Optional[str] = None
+    tags: Dict[str, List[str]]
+
+
+class Test(Base):
+    broken: Optional[Dict[str, Broken]] = None
+    failing: Optional[Dict[str, str]] = {}
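
Across these four variants, the per-model Config blocks carry the schema's extra-key policy into pydantic v1: User keeps unknown keys, Error rejects them (presumably additionalProperties settings in the source schema, which is not shown in this diff). A minimal sketch of the runtime difference, using stand-in models rather than the fixtures:

    from pydantic import BaseModel, Extra, ValidationError

    class Open(BaseModel):
        class Config:
            extra = Extra.allow     # unknown keys are kept on the instance

    class Closed(BaseModel):
        class Config:
            extra = Extra.forbid    # unknown keys fail validation

    Open(unknown=1)
    try:
        Closed(unknown=1)
    except ValidationError as exc:
        print(exc)
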
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_alias/__init__.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_alias/__init__.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_alias/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_alias/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,83 @@
+from __future__ import annotations
+
+from datetime import date, datetime
+from enum import Enum
+from typing import List, Optional
+
+from pydantic import BaseModel, Field, conint
+
+from . import model_s
+
+
+class Pet(Enum):
+    ca_t = 'ca-t'
+    dog_ = 'dog*'
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class HomeAddress(BaseModel):
+    address_1: Optional[str] = Field(None, alias='address-1')
+
+
+class TeamMembers(BaseModel):
+    __root__: List[str]
+
+
+class AllOfObj(BaseModel):
+    name: Optional[str] = None
+    number: Optional[str] = None
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Result(BaseModel):
+    event: Optional[model_s.EvenT] = None
+
+
+class Source(BaseModel):
+    country_name: Optional[str] = Field(None, alias='country-name')
+
+
+class UserName(BaseModel):
+    first_name: Optional[str] = Field(None, alias='first-name')
+    home_address: Optional[HomeAddress] = Field(None, alias='home-address')
+
+
+class AllOfRef(UserName, HomeAddress):
+    pass
+
+
+class AllOfCombine(UserName):
+    birth_date: Optional[date] = Field(None, alias='birth-date')
+    size: Optional[conint(ge=1)] = None
+
+
+class AnyOfCombine(HomeAddress, UserName):
+    age: Optional[str] = None
+
+
+class Item(HomeAddress, UserName):
+    age: Optional[str] = None
+
+
+class AnyOfCombineInObject(BaseModel):
+    item: Optional[Item] = None
+
+
+class AnyOfCombineInArrayItem(HomeAddress, UserName):
+    age: Optional[str] = None
+
+
+class AnyOfCombineInArray(BaseModel):
+    __root__: List[AnyOfCombineInArrayItem]
+
+
+class AnyOfCombineInRoot(HomeAddress, UserName):
+    age: Optional[str] = None
+    birth_date: Optional[datetime] = Field(None, alias='birth-date')
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_alias/collection_s.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_alias/collection_s.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_alias/collection_s.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_alias/collection_s.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,38 @@
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel, Field
+
+from . import model_s
+
+
+class PetS(BaseModel):
+    __root__: List[model_s.PeT]
+
+
+class UserS(BaseModel):
+    __root__: List[model_s.UseR]
+
+
+class RuleS(BaseModel):
+    __root__: List[str]
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = Field(
+        None, description='To be used as a dataset parameter value'
+    )
+    apiVersionNumber: Optional[str] = Field(
+        None, description='To be used as a version parameter value'
+    )
+    apiUrl: Optional[AnyUrl] = Field(
+        None, description="The URL describing the dataset's fields"
+    )
+    apiDocumentationUrl: Optional[AnyUrl] = Field(
+        None, description='A URL to the API console for each API'
+    )
+
+
+class ApiS(BaseModel):
+    __root__: List[Api]
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_alias/fo_o/__init__.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_alias/fo_o/__init__.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_alias/fo_o/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_alias/fo_o/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+from .. import Id
+
+
+class TeA(BaseModel):
+    flavour_name: Optional[str] = Field(None, alias='flavour-name')
+    id: Optional[Id] = None
+
+
+class CocoA(BaseModel):
+    quality: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_alias/fo_o/ba_r.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_alias/fo_o/ba_r.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_alias/fo_o/ba_r.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_alias/fo_o/ba_r.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+from __future__ import annotations
+
+from typing import Any, Dict, List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class ThinG(BaseModel):
+    attribute_s: Optional[Dict[str, Any]] = Field(None, alias='attribute-s')
+
+
+class ThanG(BaseModel):
+    attributes: Optional[List[Dict[str, Any]]] = None
+
+
+class ClonE(ThinG):
+    pass
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_alias/model_s.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_alias/model_s.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_alias/model_s.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_alias/model_s.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,29 @@
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, Dict, List, Optional, Union
+
+from pydantic import BaseModel
+
+
+class SpecieS(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class PeT(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    species: Optional[SpecieS] = None
+
+
+class UseR(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class EvenT(BaseModel):
+    name: Optional[Union[str, float, int, bool, Dict[str, Any], List[str]]] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_alias/wo_o/bo_o.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_alias/wo_o/bo_o.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_alias/wo_o/bo_o.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_alias/wo_o/bo_o.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+from .. import Source, fo_o
+
+
+class ChocolatE(BaseModel):
+    flavour_name: Optional[str] = Field(None, alias='flavour-name')
+    sourc_e: Optional[Source] = Field(None, alias='sourc-e')
+    coco_a: Optional[fo_o.CocoA] = Field(None, alias='coco-a')
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_allof/output.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_allof/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_allof/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_allof/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,80 @@
+from __future__ import annotations
+
+from datetime import date, datetime
+from typing import List, Optional
+
+from pydantic import BaseModel, conint
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Car(BaseModel):
+    number: str
+
+
+class AllOfref(Pet, Car):
+    pass
+
+
+class AllOfobj(BaseModel):
+    name: Optional[str] = None
+    number: Optional[str] = None
+
+
+class AllOfCombine(Pet):
+    birthdate: Optional[date] = None
+    size: Optional[conint(ge=1)] = None
+
+
+class AnyOfCombine(Pet, Car):
+    age: Optional[str] = None
+
+
+class Item(Pet, Car):
+    age: Optional[str] = None
+
+
+class AnyOfCombineInObject(BaseModel):
+    item: Optional[Item] = None
+
+
+class AnyOfCombineInArrayItem(Pet, Car):
+    age: Optional[str] = None
+
+
+class AnyOfCombineInArray(BaseModel):
+    __root__: List[AnyOfCombineInArrayItem]
+
+
+class AnyOfCombineInRoot(Pet, Car):
+    age: Optional[str] = None
+    birthdate: Optional[datetime] = None
+
+
+class AnyOfCombineUnknownObjectInRoot(BaseModel):
+    __root__: List[Pet]
+
+
+class AnyOfCombineUnknownObjectInArray(Pet):
+    pass
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class AllOfNested3(AllOfCombine):
+    name: Optional[AnyOfCombine] = None
+
+
+class AllOfNested2(AllOfNested3):
+    name: Optional[AllOfNested1] = None
+
+
+class AllOfNested1(AllOfNested2):
+    name: Optional[AllOfCombine] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_allof_required_fields/output.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_allof_required_fields/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_allof_required_fields/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_allof_required_fields/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+from __future__ import annotations
+
+from typing import List
+
+from pydantic import BaseModel, Field
+
+
+class EmailMessage(BaseModel):
+    message: str = Field(..., description='The email message text.')
+    subject: str = Field(..., description='The subject line of the email.')
+    to: List[str] = Field(..., description='A list of email addresses.')
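
The bare Ellipsis as the first Field() argument is how pydantic v1 marks a field required while still attaching metadata such as a description. Sketch:

    from pydantic import BaseModel, Field, ValidationError

    class Msg(BaseModel):
        subject: str = Field(..., description='required, with metadata attached')

    try:
        Msg()                       # missing required field raises
    except ValidationError as exc:
        print(exc)
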
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_allof_same_prefix_with_ref/output.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_allof_same_prefix_with_ref/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_allof_same_prefix_with_ref/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_allof_same_prefix_with_ref/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class FooBar(BaseModel):
+    id: Optional[int] = None
+
+
+class FooBarBaz(BaseModel):
+    id: Optional[int] = None
+
+
+class Foo(BaseModel):
+    foo_bar: Optional[FooBarBaz] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_any/output.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_any/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_any/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_any/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+from __future__ import annotations
+
+from typing import Any, Optional
+
+from pydantic import BaseModel
+
+
+class Item(BaseModel):
+    bar: Optional[Any] = None
+    foo: str
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_anyof/output.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_anyof/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_anyof/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_anyof/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,52 @@
+from __future__ import annotations
+
+from datetime import date
+from typing import Dict, List, Optional, Union
+
+from pydantic import BaseModel, constr
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Car(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class AnyOfItem1(BaseModel):
+    name: Optional[str] = None
+
+
+class AnyOfItem(BaseModel):
+    __root__: Union[Pet, Car, AnyOfItem1, constr(max_length=5000)]
+
+
+class Item(BaseModel):
+    name: Optional[str] = None
+
+
+class AnyOfobj(BaseModel):
+    item: Optional[Union[Pet, Car, Item, constr(max_length=5000)]] = None
+
+
+class AnyOfArray1(BaseModel):
+    name: Optional[str] = None
+    birthday: Optional[date] = None
+
+
+class AnyOfArray(BaseModel):
+    __root__: List[Union[Pet, Car, AnyOfArray1, constr(max_length=5000)]]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Config(BaseModel):
+    setting: Optional[Dict[str, Union[str, List[str]]]] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_anyof_required/output.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_anyof_required/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_anyof_required/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_anyof_required/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel, Field
+
+
+class EmailMessage(BaseModel):
+    bcc: Optional[List[str]] = Field(
+        None, description='A list of "blind carbon copy" email addresses.'
+    )
+    cc: Optional[List[str]] = Field(
+        None, description='A list of "carbon copy" email addresses.'
+    )
+    message: str = Field(..., description='The email message text.')
+    subject: str = Field(..., description='The subject line of the email.')
+    to: Optional[List[str]] = Field(None, description='A list of email addresses.')
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_array_called_fields_with_oneOf_items/output.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_array_called_fields_with_oneOf_items/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_array_called_fields_with_oneOf_items/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_array_called_fields_with_oneOf_items/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+from __future__ import annotations
+
+from typing import List, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Fields(BaseModel):
+    a: Optional[str] = None
+
+
+class Fields1(BaseModel):
+    b: Optional[str] = Field(None, regex='^[a-zA-Z_]+$')
+
+
+class BadSchema(BaseModel):
+    fields: Optional[List[Union[Fields, Fields1]]] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_array_enum/with_import_format.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_array_enum/with_import_format.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_array_enum/with_import_format.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_array_enum/with_import_format.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+from __future__ import annotations
+
+from enum import Enum
+from typing import List
+
+from pydantic import BaseModel
+
+
+class Type1Enum(Enum):
+    enumOne = 'enumOne'
+    enumTwo = 'enumTwo'
+
+
+class Type1(BaseModel):
+    __root__: List[Type1Enum]
+
+
+class Type2(Enum):
+    enumFour = 'enumFour'
+    enumFive = 'enumFive'
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_duplicate_models/output.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_duplicate_models/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_duplicate_models/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_duplicate_models/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,56 @@
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+
+
+class Result(BaseModel):
+    event: Optional[Event] = None
+
+
+class Events(BaseModel):
+    __root__: List[Event]
+
+
+class EventRoot(BaseModel):
+    __root__: Event
+
+
+class EventObject(BaseModel):
+    event: Optional[Event] = None
+
+
+class DuplicateObject1(BaseModel):
+    event: Optional[List[Event]] = None
+
+
+class Event1(BaseModel):
+    event: Optional[Event] = None
+
+
+class DuplicateObject2(BaseModel):
+    event: Optional[Event1] = None
+
+
+class DuplicateObject3(BaseModel):
+    __root__: Event
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_duplicate_models_simplify/output.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_duplicate_models_simplify/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_duplicate_models_simplify/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_duplicate_models_simplify/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+
+class M(BaseModel):
+    name: Optional[str] = None
+
+
+class R(M):
+    pass
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_enum_models/output.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_enum_models/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_enum_models/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_enum_models/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,138 @@
+from __future__ import annotations
+
+from enum import Enum, IntEnum
+from typing import List, Optional, Union
+
+from pydantic import BaseModel, Field
+
+
+class Kind(Enum):
+    dog = 'dog'
+    cat = 'cat'
+
+
+class Type(Enum):
+    animal = 'animal'
+
+
+class Number(IntEnum):
+    integer_1 = 1
+
+
+class Boolean(Enum):
+    boolean_True = True
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    kind: Optional[Kind] = None
+    type: Optional[Type] = None
+    number: Number
+    boolean: Boolean
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class Kind1(Enum):
+    snake = 'snake'
+    rabbit = 'rabbit'
+
+
+class Animal(BaseModel):
+    kind: Optional[Kind1] = None
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Type1(Enum):
+    a = 'a'
+    b = 'b'
+
+
+class EnumObject(BaseModel):
+    type: Optional[Type1] = None
+
+
+class EnumRoot(Enum):
+    a = 'a'
+    b = 'b'
+
+
+class IntEnumModel(Enum):
+    number_1 = 1
+    number_2 = 2
+
+
+class AliasEnum(Enum):
+    a = 1
+    b = 2
+    c = 3
+
+
+class MultipleTypeEnum(Enum):
+    red = 'red'
+    amber = 'amber'
+    green = 'green'
+    NoneType_None = None
+    int_42 = 42
+
+
+class SingleEnum(Enum):
+    pet = 'pet'
+
+
+class ArrayEnumEnum(Enum):
+    cat = 'cat'
+
+
+class ArrayEnumEnum1(Enum):
+    dog = 'dog'
+
+
+class ArrayEnum(BaseModel):
+    __root__: List[Union[ArrayEnumEnum, ArrayEnumEnum1]]
+
+
+class NestedVersionEnum(Enum):
+    RC1 = 'RC1'
+    RC1N = 'RC1N'
+    RC2 = 'RC2'
+    RC2N = 'RC2N'
+    RC3 = 'RC3'
+    RC4 = 'RC4'
+
+
+class NestedVersion(BaseModel):
+    __root__: Optional[NestedVersionEnum] = Field(
+        'RC1', description='nullable enum', example='RC2'
+    )
+
+
+class NestedNullableEnum(BaseModel):
+    nested_version: Optional[NestedVersion] = Field(
+        default_factory=lambda: NestedVersion.parse_obj('RC1'),
+        description='nullable enum',
+        example='RC2',
+    )
+
+
+class VersionEnum(Enum):
+    RC1 = 'RC1'
+    RC1N = 'RC1N'
+    RC2 = 'RC2'
+    RC2N = 'RC2N'
+    RC3 = 'RC3'
+    RC4 = 'RC4'
+
+
+class Version(BaseModel):
+    __root__: Optional[VersionEnum] = Field(
+        'RC1', description='nullable enum', example='RC2'
+    )
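
NestedVersion is a __root__ model, so a bare 'RC1' literal would not be a valid default for NestedNullableEnum.nested_version; the generator instead emits a default_factory that builds the root model via parse_obj. The equivalent by hand, assuming the generated classes above are in scope (pydantic v1):

    nested = NestedVersion.parse_obj('RC2')
    assert nested.__root__ is NestedVersionEnum.RC2
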
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/__init__.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/__init__.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+from ._internal import DifferentTea, Error, Id, OptionalModel, Result, Source
+
+__all__ = ["DifferentTea", "Error", "Id", "OptionalModel", "Result", "Source"]
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/_internal.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/_internal.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/_internal.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/_internal.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,60 @@
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+from . import models
+
+
+class OptionalModel(BaseModel):
+    __root__: str
+
+
+class Id(BaseModel):
+    __root__: str
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Result(BaseModel):
+    event: Optional[models.Event] = None
+
+
+class Source(BaseModel):
+    country: Optional[str] = None
+
+
+class DifferentTea(BaseModel):
+    foo: Optional[Tea] = None
+    nested: Optional[Tea_1] = None
+
+
+class Tea(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+
+
+class Cocoa(BaseModel):
+    quality: Optional[int] = None
+
+
+class Tea_1(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea_1] = None
+    optional: Optional[List[OptionalModel]] = None
+
+
+class TeaClone(BaseModel):
+    flavour: Optional[str] = None
+    id: Optional[Id] = None
+    self: Optional[Tea_1] = None
+    optional: Optional[List[OptionalModel]] = None
+
+
+class ListModel(BaseModel):
+    __root__: List[Tea_1]
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/bar.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/bar.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+from __future__ import annotations
+
+from pydantic import BaseModel
+
+
+class Field(BaseModel):
+    __root__: str
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/collections.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/collections.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/collections.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/collections.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,39 @@
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional
+
+from pydantic import AnyUrl, BaseModel
+
+from . import models
+
+
+class Pets(BaseModel):
+    __root__: List[models.Pet]
+
+
+class Users(BaseModel):
+    __root__: List[models.User]
+
+
+class Rules(BaseModel):
+    __root__: List[str]
+
+
+class Stage(Enum):
+    test = 'test'
+    dev = 'dev'
+    stg = 'stg'
+    prod = 'prod'
+
+
+class Api(BaseModel):
+    apiKey: Optional[str] = None
+    apiVersionNumber: Optional[str] = None
+    apiUrl: Optional[AnyUrl] = None
+    apiDocumentationUrl: Optional[AnyUrl] = None
+    stage: Optional[Stage] = None
+
+
+class Apis(BaseModel):
+    __root__: List[Api]
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/foo/__init__.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/foo/__init__.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/foo/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/foo/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+from .._internal import Cocoa, Tea
+
+__all__ = ["Cocoa", "Tea"]
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/foo/bar.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/foo/bar.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/foo/bar.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/foo/bar.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+from __future__ import annotations
+
+from typing import Any, Dict, List, Optional
+
+from pydantic import BaseModel
+
+
+class Thing(BaseModel):
+    attributes: Optional[Dict[str, Any]] = None
+
+
+class Thang(BaseModel):
+    attributes: Optional[List[Dict[str, Any]]] = None
+
+
+class Others(BaseModel):
+    name: Optional[str] = None
+
+
+class Clone(Thing):
+    others: Optional[Others] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/models.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/models.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/models.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/models.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,29 @@
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any, Dict, List, Optional, Union
+
+from pydantic import BaseModel
+
+
+class Species(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+    species: Optional[Species] = None
+
+
+class User(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Event(BaseModel):
+    name: Optional[Union[str, float, int, bool, Dict[str, Any], List[str]]] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/nested/foo.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/nested/foo.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/nested/foo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/nested/foo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,5 @@
+from .._internal import ListModel
+from .._internal import Tea_1 as Tea
+from .._internal import TeaClone
+
+__all__ = ["ListModel", "Tea", "TeaClone"]
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/woo/boo.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/woo/boo.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_modular/woo/boo.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_modular/woo/boo.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import BaseModel
+
+from .. import bar
+from .._internal import Cocoa, Source
+
+
+class Chocolate(BaseModel):
+    flavour: Optional[str] = None
+    source: Optional[Source] = None
+    cocoa: Optional[Cocoa] = None
+    field: Optional[bar.Field] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_nested_anyof/output.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_nested_anyof/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_nested_anyof/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_nested_anyof/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+from __future__ import annotations
+
+from typing import List, Optional, Union
+
+from pydantic import BaseModel
+
+
+class Type1(BaseModel):
+    prop: Optional[str] = None
+
+
+class Type2(BaseModel):
+    prop: Optional[str] = None
+
+
+class Container(BaseModel):
+    contents: List[Union[Type1, Type2]]
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_nested_oneof/output.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_nested_oneof/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_nested_oneof/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_nested_oneof/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+from __future__ import annotations
+
+from typing import List, Optional, Union
+
+from pydantic import BaseModel
+
+
+class Type1(BaseModel):
+    prop: Optional[str] = None
+
+
+class Type2(BaseModel):
+    prop: Optional[str] = None
+
+
+class Container(BaseModel):
+    contents: List[Union[Type1, Type2]]
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_oneof/output.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_oneof/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_oneof/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_oneof/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,52 @@
+from __future__ import annotations
+
+from datetime import date
+from typing import Dict, List, Optional, Union
+
+from pydantic import BaseModel, constr
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Car(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class OneOfItem1(BaseModel):
+    name: Optional[str] = None
+
+
+class OneOfItem(BaseModel):
+    __root__: Union[Pet, Car, OneOfItem1, constr(max_length=5000)]
+
+
+class Item(BaseModel):
+    name: Optional[str] = None
+
+
+class OneOfobj(BaseModel):
+    item: Optional[Union[Pet, Car, Item, constr(max_length=5000)]] = None
+
+
+class OneOfArray1(BaseModel):
+    name: Optional[str] = None
+    birthday: Optional[date] = None
+
+
+class OneOfArray(BaseModel):
+    __root__: List[Union[Pet, Car, OneOfArray1, constr(max_length=5000)]]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Config(BaseModel):
+    setting: Optional[Dict[str, Union[str, List[str]]]] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_remote_ref/with_import_format.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_remote_ref/with_import_format.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_remote_ref/with_import_format.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_remote_ref/with_import_format.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+from __future__ import annotations
+
+from typing import Optional
+
+from pydantic import AnyUrl, BaseModel, conint
+
+
+class Problem(BaseModel):
+    detail: Optional[str] = None
+    instance: Optional[AnyUrl] = None
+    status: Optional[conint(ge=100, lt=600)] = None
+    title: Optional[str] = None
+    type: Optional[AnyUrl] = 'about:blank'
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_required_null/output.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_required_null/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_required_null/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_required_null/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+from __future__ import annotations
+
+from enum import Enum
+from typing import Optional
+
+from pydantic import BaseModel, Field
+
+
+class Type(Enum):
+    my_first_object = 'my_first_object'
+    my_second_object = 'my_second_object'
+    my_third_object = 'my_third_object'
+
+
+class ObjectBase(BaseModel):
+    name: Optional[str] = Field(None, description='Name of the object')
+    type: Optional[Type] = Field(None, description='Object type')
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_resolved_models/output.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_resolved_models/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_parse_resolved_models/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_parse_resolved_models/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Resolved(BaseModel):
+    resolved: Optional[List[str]] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_responses_with_tag/output.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_responses_with_tag/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_responses_with_tag/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_responses_with_tag/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,62 @@
+from __future__ import annotations
+
+from datetime import datetime
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class PetForm(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
+
+
+class PetsGetResponse(BaseModel):
+    __root__: List[Pet]
+
+
+class FoodFoodIdGetResponse(BaseModel):
+    __root__: List[int]
+
+
+class UserGetResponse(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UserPostRequest(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UsersGetResponseItem(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UsersGetResponse(BaseModel):
+    __root__: List[UsersGetResponseItem]
+
+
+class UsersPostRequestItem(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UsersPostRequest(BaseModel):
+    __root__: List[UsersPostRequestItem]
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_responses_without_content/output.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_responses_without_content/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_responses_without_content/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_responses_without_content/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,62 @@
+from __future__ import annotations
+
+from datetime import datetime
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class FoodFoodIdGetResponse(BaseModel):
+    __root__: List[int]
+
+
+class UserGetResponse(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UserPostRequest(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UsersGetResponseItem(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UsersGetResponse(BaseModel):
+    __root__: List[UsersGetResponseItem]
+
+
+class UsersPostRequestItem(BaseModel):
+    timestamp: datetime
+    name: str
+    age: Optional[str] = None
+
+
+class UsersPostRequest(BaseModel):
+    __root__: List[UsersPostRequestItem]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class PetForm(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
+
+
+class PetsGetResponse(BaseModel):
+    __root__: List[Pet]
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_with_query_parameters/output.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_with_query_parameters/output.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_with_query_parameters/output.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_with_query_parameters/output.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,65 @@
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional, Union
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class PetForm(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
+
+
+class PetsPetIdGetParametersQuery(BaseModel):
+    include: Optional[str] = None
+
+
+class Filter(BaseModel):
+    type: Optional[str] = None
+    color: Optional[str] = None
+
+
+class MediaType(Enum):
+    xml = 'xml'
+    json = 'json'
+
+
+class MultipleMediaFilter(BaseModel):
+    type: Optional[str] = None
+    media_type: Optional[MediaType] = 'xml'
+
+
+class MultipleMediaFilter1(BaseModel):
+    type: Optional[str] = None
+    media_type: Optional[MediaType] = 'json'
+
+
+class PetsGetParametersQuery(BaseModel):
+    limit: Optional[int] = 0
+    HomeAddress: Optional[str] = 'Unknown'
+    kind: Optional[str] = 'dog'
+    filter: Optional[Filter] = None
+    multipleMediaFilter: Optional[
+        Union[MultipleMediaFilter, MultipleMediaFilter1]
+    ] = None
+
+
+class PetsGetResponse(BaseModel):
+    __root__: List[Pet]
+
+
+class PetsPostRequest(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_with_query_parameters/with_path_params.py 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_with_query_parameters/with_path_params.py
--- 0.26.4-3/tests/data/expected/parser/openapi/openapi_parser_with_query_parameters/with_path_params.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/expected/parser/openapi/openapi_parser_with_query_parameters/with_path_params.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,66 @@
+from __future__ import annotations
+
+from enum import Enum
+from typing import List, Optional, Union
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class PetForm(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
+
+
+class PetsPetIdGetParameters(BaseModel):
+    petId: str
+    include: Optional[str] = None
+
+
+class Filter(BaseModel):
+    type: Optional[str] = None
+    color: Optional[str] = None
+
+
+class MediaType(Enum):
+    xml = 'xml'
+    json = 'json'
+
+
+class MultipleMediaFilter(BaseModel):
+    type: Optional[str] = None
+    media_type: Optional[MediaType] = 'xml'
+
+
+class MultipleMediaFilter1(BaseModel):
+    type: Optional[str] = None
+    media_type: Optional[MediaType] = 'json'
+
+
+class PetsGetParameters(BaseModel):
+    limit: Optional[int] = 0
+    HomeAddress: Optional[str] = 'Unknown'
+    kind: Optional[str] = 'dog'
+    filter: Optional[Filter] = None
+    multipleMediaFilter: Optional[
+        Union[MultipleMediaFilter, MultipleMediaFilter1]
+    ] = None
+
+
+class PetsGetResponse(BaseModel):
+    __root__: List[Pet]
+
+
+class PetsPostRequest(BaseModel):
+    name: Optional[str] = None
+    age: Optional[int] = None
diff -pruN 0.26.4-3/tests/data/graphql/additional-imports-types.json 0.45.0-1/tests/data/graphql/additional-imports-types.json
--- 0.26.4-3/tests/data/graphql/additional-imports-types.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/graphql/additional-imports-types.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+{
+  "Date": {
+    "py_type": "date"
+  },
+  "DateTime": {
+    "py_type": "datetime"
+  },
+  "MyCustomClass": {
+    "py_type": "MyCustomPythonClass"
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/graphql/additional-imports.graphql 0.45.0-1/tests/data/graphql/additional-imports.graphql
--- 0.26.4-3/tests/data/graphql/additional-imports.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/graphql/additional-imports.graphql	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+scalar Date
+
+"DateTime (ISO8601, example: 2020-01-01T10:11:12+00:00)"
+scalar DateTime
+
+scalar MyCustomClass
+
+type A {
+  a: Date!
+  b: DateTime!
+  c: MyCustomClass!
+}
diff -pruN 0.26.4-3/tests/data/graphql/annotated.graphql 0.45.0-1/tests/data/graphql/annotated.graphql
--- 0.26.4-3/tests/data/graphql/annotated.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/graphql/annotated.graphql	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,9 @@
+type A {
+    field: String!
+    optionalField: String
+    listField: [String!]!
+    listOptionalField: [String]!
+    optionalListField: [String!]
+    optionalListOptionalField: [String]
+    listListField:[[String!]!]!
+}
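
GraphQL nullability composes outside-in: a trailing ! on the list makes the list itself required, while ! on the element type makes the items non-null. The assumed mapping for these fields, following the TypeAlias conventions visible in the expected GraphQL outputs earlier in this diff:

    field: String!                      ->  String
    optionalField: String               ->  Optional[String]
    listField: [String!]!               ->  List[String]
    listOptionalField: [String]!        ->  List[Optional[String]]
    optionalListField: [String!]        ->  Optional[List[String]]
    optionalListOptionalField: [String] ->  Optional[List[Optional[String]]]
    listListField: [[String!]!]!        ->  List[List[String]]
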
diff -pruN 0.26.4-3/tests/data/graphql/casing.graphql 0.45.0-1/tests/data/graphql/casing.graphql
--- 0.26.4-3/tests/data/graphql/casing.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/graphql/casing.graphql	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+enum lowercase {
+  foo
+}
+
+type lowercasetype {
+  foo: Int!
+}
+
+type conflict {
+  foo: String!
+  Foo: Int!
+}
+
+type Conflict {
+  bar: String!
+  Bar: Int!
+}
+
+type Ref {
+  bar: lowercase!
+  baz: lowercasetype!
+  spam: conflict!
+  eggs: Conflict!
+}
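The point of this fixture is name collisions: conflict/Conflict differ only by case, as do their foo/Foo and bar/Bar fields, so the generator must disambiguate while keeping the wire names reachable. One plausible pattern using pydantic aliases (illustrative only; the fixture's expected output is not part of this hunk):

    from pydantic import BaseModel, Field


    class Conflict(BaseModel):
        # Keep one spelling as the attribute and alias the case-colliding one.
        bar: str
        bar_: int = Field(..., alias='Bar')


    print(Conflict.parse_obj({'bar': 'x', 'Bar': 1}).bar_)  # -> 1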
diff -pruN 0.26.4-3/tests/data/graphql/custom-scalar-types.graphql 0.45.0-1/tests/data/graphql/custom-scalar-types.graphql
--- 0.26.4-3/tests/data/graphql/custom-scalar-types.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/graphql/custom-scalar-types.graphql	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,6 @@
+scalar Long
+
+type A {
+  id: ID!
+  duration: Long!
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/graphql/custom-scalar-types.json 0.45.0-1/tests/data/graphql/custom-scalar-types.json
--- 0.26.4-3/tests/data/graphql/custom-scalar-types.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/graphql/custom-scalar-types.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,5 @@
+{
+  "Long": {
+    "py_type": "int"
+  }
+}
\ No newline at end of file
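Here the single custom scalar Long is mapped to int via its `py_type` entry. Combined with the built-in scalar handling (GraphQL ID is conventionally emitted as str), the generated model plausibly ends up as follows (a sketch under those assumptions):

    from pydantic import BaseModel


    class A(BaseModel):
        id: str        # built-in ID scalar
        duration: int  # custom Long scalar, via "py_type": "int"


    print(A(id='abc', duration=86_400))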
diff -pruN 0.26.4-3/tests/data/graphql/different-types-of-fields.graphql 0.45.0-1/tests/data/graphql/different-types-of-fields.graphql
--- 0.26.4-3/tests/data/graphql/different-types-of-fields.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/graphql/different-types-of-fields.graphql	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+type A {
+    field: String!
+    optionalField: String
+    optionalListOptionalField: [String]
+    listOptionalField: [String]!
+    listField: [String!]!
+    optionalListOptionalListOptionalField:[[String]]
+    optionalListListOptionalField:[[String]!]
+    listListOptionalField:[[String]!]!
+    listOptionalListOptionalField:[[String]]!
+    optionalListOptionalListField:[[String!]]
+    optionalListListField:[[String!]!]
+    listListField:[[String!]!]!
+    listOptionalListField:[[String!]]!
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/graphql/enums.graphql 0.45.0-1/tests/data/graphql/enums.graphql
--- 0.26.4-3/tests/data/graphql/enums.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/graphql/enums.graphql	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+"Employee shift status"
+enum EmployeeShiftStatus {
+  "not on shift"
+  NOT_ON_SHIFT
+  "on shift"
+  ON_SHIFT
+}
+
+enum Color {
+  RED
+  GREEN
+  BLUE
+}
+
+enum EnumWithOneField {
+    FIELD
+}
\ No newline at end of file
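GraphQL enum members carry no separate value, so a natural Python rendering reuses the member name as the value. A sketch of that encoding (illustrative; the expected output file is not part of this hunk):

    from enum import Enum


    class Color(Enum):
        RED = 'RED'
        GREEN = 'GREEN'
        BLUE = 'BLUE'


    # Round-trips cleanly through the wire representation.
    print(Color('GREEN') is Color.GREEN)  # -> True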
diff -pruN 0.26.4-3/tests/data/graphql/field-aliases.graphql 0.45.0-1/tests/data/graphql/field-aliases.graphql
--- 0.26.4-3/tests/data/graphql/field-aliases.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/graphql/field-aliases.graphql	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,6 @@
+scalar DateTime
+
+type DateTimePeriod {
+    from: DateTime!
+    to: DateTime!
+}
diff -pruN 0.26.4-3/tests/data/graphql/field-aliases.json 0.45.0-1/tests/data/graphql/field-aliases.json
--- 0.26.4-3/tests/data/graphql/field-aliases.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/graphql/field-aliases.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,4 @@
+{
+  "to": "periodTo",
+  "from": "periodFrom"
+}
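The alias map above exists because `from` is a Python keyword: the generated attribute needs a legal name while the wire name stays intact. A sketch of the pattern (with str standing in for the DateTime scalar):

    from pydantic import BaseModel, Field


    class DateTimePeriod(BaseModel):
        periodFrom: str = Field(..., alias='from')
        periodTo: str = Field(..., alias='to')


    period = DateTimePeriod.parse_obj(
        {'from': '2020-01-01T00:00:00', 'to': '2020-01-02T00:00:00'}
    )
    print(period.periodFrom)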
diff -pruN 0.26.4-3/tests/data/graphql/field-default-enum.graphql 0.45.0-1/tests/data/graphql/field-default-enum.graphql
--- 0.26.4-3/tests/data/graphql/field-default-enum.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/graphql/field-default-enum.graphql	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+enum Color {
+  RED
+  GREEN
+  BLUE
+}
+
+input Car {
+  colorList: [Color!] = [RED]
+  colorOne: Color = GREEN
+}
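This fixture checks that enum defaults on an input type survive generation, for both a bare member (GREEN) and a list literal ([RED]). A sketch of the expected shape (pydantic copies mutable defaults per instance, so the literal list default is safe here):

    from enum import Enum
    from typing import List, Optional

    from pydantic import BaseModel


    class Color(Enum):
        RED = 'RED'
        GREEN = 'GREEN'
        BLUE = 'BLUE'


    class Car(BaseModel):
        colorList: Optional[List[Color]] = [Color.RED]
        colorOne: Optional[Color] = Color.GREEN


    print(Car().colorList, Car().colorOne)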
diff -pruN 0.26.4-3/tests/data/graphql/github-api-aliases.json 0.45.0-1/tests/data/graphql/github-api-aliases.json
--- 0.26.4-3/tests/data/graphql/github-api-aliases.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/graphql/github-api-aliases.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+{
+  "fields": "fields_"
+}
\ No newline at end of file
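Aliasing `fields` to `fields_` presumably sidesteps a clash with pydantic itself: BaseModel already exposes attributes such as the (deprecated, in v1) `fields` property, so a schema field named `fields` would shadow it. The same alias pattern as above applies (Example is a hypothetical stand-in; the GitHub schema types that use `fields` appear deep inside the github-api.graphql diff that follows):

    from pydantic import BaseModel, Field


    class Example(BaseModel):
        fields_: int = Field(..., alias='fields')


    print(Example.parse_obj({'fields': 3}).fields_)  # -> 3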
diff -pruN 0.26.4-3/tests/data/graphql/github-api-types.json 0.45.0-1/tests/data/graphql/github-api-types.json
--- 0.26.4-3/tests/data/graphql/github-api-types.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/graphql/github-api-types.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "Date": {
+    "py_type": "date"
+  },
+  "DateTime": {
+    "py_type": "datetime"
+  },
+  "BigInt": {
+    "py_type": "int"
+  },
+  "PreciseDateTime": {
+    "py_type": "datetime"
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/graphql/github-api.graphql 0.45.0-1/tests/data/graphql/github-api.graphql
--- 0.26.4-3/tests/data/graphql/github-api.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/graphql/github-api.graphql	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,61141 @@
+directive @requiredCapabilities(
+  requiredCapabilities: [String!]
+) on ARGUMENT_DEFINITION | ENUM | ENUM_VALUE | FIELD_DEFINITION | INPUT_FIELD_DEFINITION | INPUT_OBJECT | INTERFACE | OBJECT | SCALAR | UNION
+
+"""
+Marks an element of a GraphQL schema as only available via a preview header
+"""
+directive @preview(
+  """
+  The identifier of the API preview that toggles this field.
+  """
+  toggledBy: String!
+) on ARGUMENT_DEFINITION | ENUM | ENUM_VALUE | FIELD_DEFINITION | INPUT_FIELD_DEFINITION | INPUT_OBJECT | INTERFACE | OBJECT | SCALAR | UNION
+
+"""
+Defines what type of global IDs are accepted for a mutation argument of type ID.
+"""
+directive @possibleTypes(
+  """
+  Abstract type of accepted global ID
+  """
+  abstractType: String
+
+  """
+  Accepted types of global IDs.
+  """
+  concreteTypes: [String!]!
+) on INPUT_FIELD_DEFINITION
+
+"""
+Autogenerated input type of AbortQueuedMigrations
+"""
+input AbortQueuedMigrationsInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the organization that is running the migrations.
+  """
+  ownerId: ID! @possibleTypes(concreteTypes: ["Organization"])
+}
+
+"""
+Autogenerated return type of AbortQueuedMigrations
+"""
+type AbortQueuedMigrationsPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Did the operation succeed?
+  """
+  success: Boolean
+}
+
+"""
+Autogenerated input type of AbortRepositoryMigration
+"""
+input AbortRepositoryMigrationInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the migration to be aborted.
+  """
+  migrationId: ID! @possibleTypes(concreteTypes: ["RepositoryMigration"])
+}
+
+"""
+Autogenerated return type of AbortRepositoryMigration
+"""
+type AbortRepositoryMigrationPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Did the operation succeed?
+  """
+  success: Boolean
+}
+
+"""
+Autogenerated input type of AcceptEnterpriseAdministratorInvitation
+"""
+input AcceptEnterpriseAdministratorInvitationInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The id of the invitation being accepted
+  """
+  invitationId: ID! @possibleTypes(concreteTypes: ["EnterpriseAdministratorInvitation"])
+}
+
+"""
+Autogenerated return type of AcceptEnterpriseAdministratorInvitation
+"""
+type AcceptEnterpriseAdministratorInvitationPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The invitation that was accepted.
+  """
+  invitation: EnterpriseAdministratorInvitation
+
+  """
+  A message confirming the result of accepting an administrator invitation.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of AcceptTopicSuggestion
+"""
+input AcceptTopicSuggestionInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The name of the suggested topic.
+  """
+  name: String!
+
+  """
+  The Node ID of the repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of AcceptTopicSuggestion
+"""
+type AcceptTopicSuggestionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The accepted topic.
+  """
+  topic: Topic
+}
+
+"""
+Represents an object which can take actions on GitHub. Typically a User or Bot.
+"""
+interface Actor {
+  """
+  A URL pointing to the actor's public avatar.
+  """
+  avatarUrl(
+    """
+    The size of the resulting square image.
+    """
+    size: Int
+  ): URI!
+
+  """
+  The username of the actor.
+  """
+  login: String!
+
+  """
+  The HTTP path for this actor.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for this actor.
+  """
+  url: URI!
+}
+
+"""
+Location information for an actor
+"""
+type ActorLocation {
+  """
+  City
+  """
+  city: String
+
+  """
+  Country name
+  """
+  country: String
+
+  """
+  Country code
+  """
+  countryCode: String
+
+  """
+  Region name
+  """
+  region: String
+
+  """
+  Region or state code
+  """
+  regionCode: String
+}
+
+"""
+The actor's type.
+"""
+enum ActorType {
+  """
+  Indicates a team actor.
+  """
+  TEAM
+
+  """
+  Indicates a user actor.
+  """
+  USER
+}
+
+"""
+Autogenerated input type of AddAssigneesToAssignable
+"""
+input AddAssigneesToAssignableInput {
+  """
+  The id of the assignable object to add assignees to.
+  """
+  assignableId: ID! @possibleTypes(concreteTypes: ["Issue", "PullRequest"], abstractType: "Assignable")
+
+  """
+  The ids of the users to add as assignees.
+  """
+  assigneeIds: [ID!]! @possibleTypes(concreteTypes: ["User"])
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated return type of AddAssigneesToAssignable
+"""
+type AddAssigneesToAssignablePayload {
+  """
+  The item that was assigned.
+  """
+  assignable: Assignable
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of AddComment
+"""
+input AddCommentInput {
+  """
+  The contents of the comment.
+  """
+  body: String!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the subject to modify.
+  """
+  subjectId: ID! @possibleTypes(concreteTypes: ["Issue", "PullRequest"], abstractType: "IssueOrPullRequest")
+}
+
+"""
+Autogenerated return type of AddComment
+"""
+type AddCommentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The edge from the subject's comment connection.
+  """
+  commentEdge: IssueCommentEdge
+
+  """
+  The subject
+  """
+  subject: Node
+
+  """
+  The edge from the subject's timeline connection.
+  """
+  timelineEdge: IssueTimelineItemEdge
+}
+
+"""
+Autogenerated input type of AddDiscussionComment
+"""
+input AddDiscussionCommentInput {
+  """
+  The contents of the comment.
+  """
+  body: String!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the discussion to comment on.
+  """
+  discussionId: ID! @possibleTypes(concreteTypes: ["Discussion"])
+
+  """
+  The Node ID of the discussion comment within this discussion to reply to.
+  """
+  replyToId: ID @possibleTypes(concreteTypes: ["DiscussionComment"])
+}
+
+"""
+Autogenerated return type of AddDiscussionComment
+"""
+type AddDiscussionCommentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The newly created discussion comment.
+  """
+  comment: DiscussionComment
+}
+
+"""
+Autogenerated input type of AddDiscussionPollVote
+"""
+input AddDiscussionPollVoteInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the discussion poll option to vote for.
+  """
+  pollOptionId: ID! @possibleTypes(concreteTypes: ["DiscussionPollOption"])
+}
+
+"""
+Autogenerated return type of AddDiscussionPollVote
+"""
+type AddDiscussionPollVotePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The poll option that a vote was added to.
+  """
+  pollOption: DiscussionPollOption
+}
+
+"""
+Autogenerated input type of AddEnterpriseOrganizationMember
+"""
+input AddEnterpriseOrganizationMemberInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise which owns the organization.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The ID of the organization the users will be added to.
+  """
+  organizationId: ID! @possibleTypes(concreteTypes: ["Organization"])
+
+  """
+  The role to assign the users in the organization
+  """
+  role: OrganizationMemberRole
+
+  """
+  The IDs of the enterprise members to add.
+  """
+  userIds: [ID!]!
+}
+
+"""
+Autogenerated return type of AddEnterpriseOrganizationMember
+"""
+type AddEnterpriseOrganizationMemberPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The users who were added to the organization.
+  """
+  users: [User!]
+}
+
+"""
+Autogenerated input type of AddEnterpriseSupportEntitlement
+"""
+input AddEnterpriseSupportEntitlementInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the Enterprise which the admin belongs to.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The login of a member who will receive the support entitlement.
+  """
+  login: String!
+}
+
+"""
+Autogenerated return type of AddEnterpriseSupportEntitlement
+"""
+type AddEnterpriseSupportEntitlementPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A message confirming the result of adding the support entitlement.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of AddLabelsToLabelable
+"""
+input AddLabelsToLabelableInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ids of the labels to add.
+  """
+  labelIds: [ID!]! @possibleTypes(concreteTypes: ["Label"])
+
+  """
+  The id of the labelable object to add labels to.
+  """
+  labelableId: ID! @possibleTypes(concreteTypes: ["Discussion", "Issue", "PullRequest"], abstractType: "Labelable")
+}
+
+"""
+Autogenerated return type of AddLabelsToLabelable
+"""
+type AddLabelsToLabelablePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The item that was labeled.
+  """
+  labelable: Labelable
+}
+
+"""
+Autogenerated input type of AddProjectCard
+"""
+input AddProjectCardInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The content of the card. Must be a member of the ProjectCardItem union
+  """
+  contentId: ID @possibleTypes(concreteTypes: ["Issue", "PullRequest"], abstractType: "ProjectCardItem")
+
+  """
+  The note on the card.
+  """
+  note: String
+
+  """
+  The Node ID of the ProjectColumn.
+  """
+  projectColumnId: ID! @possibleTypes(concreteTypes: ["ProjectColumn"])
+}
+
+"""
+Autogenerated return type of AddProjectCard
+"""
+type AddProjectCardPayload {
+  """
+  The edge from the ProjectColumn's card connection.
+  """
+  cardEdge: ProjectCardEdge
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ProjectColumn
+  """
+  projectColumn: ProjectColumn
+}
+
+"""
+Autogenerated input type of AddProjectColumn
+"""
+input AddProjectColumnInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The name of the column.
+  """
+  name: String!
+
+  """
+  The Node ID of the project.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["Project"])
+}
+
+"""
+Autogenerated return type of AddProjectColumn
+"""
+type AddProjectColumnPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The edge from the project's column connection.
+  """
+  columnEdge: ProjectColumnEdge
+
+  """
+  The project
+  """
+  project: Project
+}
+
+"""
+Autogenerated input type of AddProjectV2DraftIssue
+"""
+input AddProjectV2DraftIssueInput {
+  """
+  The IDs of the assignees of the draft issue.
+  """
+  assigneeIds: [ID!] @possibleTypes(concreteTypes: ["User"])
+
+  """
+  The body of the draft issue.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the Project to add the draft issue to.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+
+  """
+  The title of the draft issue. A project item can also be created by providing
+  the URL of an Issue or Pull Request if you have access.
+  """
+  title: String!
+}
+
+"""
+Autogenerated return type of AddProjectV2DraftIssue
+"""
+type AddProjectV2DraftIssuePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The draft issue added to the project.
+  """
+  projectItem: ProjectV2Item
+}
+
+"""
+Autogenerated input type of AddProjectV2ItemById
+"""
+input AddProjectV2ItemByIdInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The id of the Issue or Pull Request to add.
+  """
+  contentId: ID!
+    @possibleTypes(concreteTypes: ["DraftIssue", "Issue", "PullRequest"], abstractType: "ProjectV2ItemContent")
+
+  """
+  The ID of the Project to add the item to.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+}
+
+"""
+Autogenerated return type of AddProjectV2ItemById
+"""
+type AddProjectV2ItemByIdPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The item added to the project.
+  """
+  item: ProjectV2Item
+}
+
+"""
+Autogenerated input type of AddPullRequestReviewComment
+"""
+input AddPullRequestReviewCommentInput {
+  """
+  The text of the comment. This field is required
+
+  **Upcoming Change on 2023-10-01 UTC**
+  **Description:** `body` will be removed. Use addPullRequestReviewThread or addPullRequestReviewThreadReply instead
+  **Reason:** We are deprecating the addPullRequestReviewComment mutation
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The SHA of the commit to comment on.
+
+  **Upcoming Change on 2023-10-01 UTC**
+  **Description:** `commitOID` will be removed. Use addPullRequestReviewThread or addPullRequestReviewThreadReply instead
+  **Reason:** We are deprecating the addPullRequestReviewComment mutation
+  """
+  commitOID: GitObjectID
+
+  """
+  The comment id to reply to.
+
+  **Upcoming Change on 2023-10-01 UTC**
+  **Description:** `inReplyTo` will be removed. Use addPullRequestReviewThread or addPullRequestReviewThreadReply instead
+  **Reason:** We are deprecating the addPullRequestReviewComment mutation
+  """
+  inReplyTo: ID @possibleTypes(concreteTypes: ["PullRequestReviewComment"])
+
+  """
+  The relative path of the file to comment on.
+
+  **Upcoming Change on 2023-10-01 UTC**
+  **Description:** `path` will be removed. Use addPullRequestReviewThread or addPullRequestReviewThreadReply instead
+  **Reason:** We are deprecating the addPullRequestReviewComment mutation
+  """
+  path: String
+
+  """
+  The line index in the diff to comment on.
+
+  **Upcoming Change on 2023-10-01 UTC**
+  **Description:** `position` will be removed. Use addPullRequestReviewThread or addPullRequestReviewThreadReply instead
+  **Reason:** We are deprecating the addPullRequestReviewComment mutation
+  """
+  position: Int
+
+  """
+  The node ID of the pull request reviewing
+
+  **Upcoming Change on 2023-10-01 UTC**
+  **Description:** `pullRequestId` will be removed. Use
+  addPullRequestReviewThread or addPullRequestReviewThreadReply instead
+  **Reason:** We are deprecating the addPullRequestReviewComment mutation
+  """
+  pullRequestId: ID @possibleTypes(concreteTypes: ["PullRequest"])
+
+  """
+  The Node ID of the review to modify.
+
+  **Upcoming Change on 2023-10-01 UTC**
+  **Description:** `pullRequestReviewId` will be removed. Use
+  addPullRequestReviewThread or addPullRequestReviewThreadReply instead
+  **Reason:** We are deprecating the addPullRequestReviewComment mutation
+  """
+  pullRequestReviewId: ID @possibleTypes(concreteTypes: ["PullRequestReview"])
+}
+
+"""
+Autogenerated return type of AddPullRequestReviewComment
+"""
+type AddPullRequestReviewCommentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The newly created comment.
+  """
+  comment: PullRequestReviewComment
+
+  """
+  The edge from the review's comment connection.
+  """
+  commentEdge: PullRequestReviewCommentEdge
+}
+
+"""
+Autogenerated input type of AddPullRequestReview
+"""
+input AddPullRequestReviewInput {
+  """
+  The contents of the review body comment.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The review line comments.
+
+  **Upcoming Change on 2023-10-01 UTC**
+  **Description:** `comments` will be removed. Use the `threads` argument instead
+  **Reason:** We are deprecating comment fields that use diff-relative positioning
+  """
+  comments: [DraftPullRequestReviewComment]
+
+  """
+  The commit OID the review pertains to.
+  """
+  commitOID: GitObjectID
+
+  """
+  The event to perform on the pull request review.
+  """
+  event: PullRequestReviewEvent
+
+  """
+  The Node ID of the pull request to modify.
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+
+  """
+  The review line comment threads.
+  """
+  threads: [DraftPullRequestReviewThread]
+}
+
+"""
+Autogenerated return type of AddPullRequestReview
+"""
+type AddPullRequestReviewPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The newly created pull request review.
+  """
+  pullRequestReview: PullRequestReview
+
+  """
+  The edge from the pull request's review connection.
+  """
+  reviewEdge: PullRequestReviewEdge
+}
+
+"""
+Autogenerated input type of AddPullRequestReviewThread
+"""
+input AddPullRequestReviewThreadInput {
+  """
+  Body of the thread's first comment.
+  """
+  body: String!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The line of the blob to which the thread refers, required for line-level
+  threads. The end of the line range for multi-line comments.
+  """
+  line: Int
+
+  """
+  Path to the file being commented on.
+  """
+  path: String!
+
+  """
+  The node ID of the pull request reviewing
+  """
+  pullRequestId: ID @possibleTypes(concreteTypes: ["PullRequest"])
+
+  """
+  The Node ID of the review to modify.
+  """
+  pullRequestReviewId: ID @possibleTypes(concreteTypes: ["PullRequestReview"])
+
+  """
+  The side of the diff on which the line resides. For multi-line comments, this is the side for the end of the line range.
+  """
+  side: DiffSide = RIGHT
+
+  """
+  The first line of the range to which the comment refers.
+  """
+  startLine: Int
+
+  """
+  The side of the diff on which the start line resides.
+  """
+  startSide: DiffSide = RIGHT
+
+  """
+  The level at which the comments in the corresponding thread are targeted; this can be a diff line or a file
+  """
+  subjectType: PullRequestReviewThreadSubjectType = LINE
+}
+
+"""
+Autogenerated return type of AddPullRequestReviewThread
+"""
+type AddPullRequestReviewThreadPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The newly created thread.
+  """
+  thread: PullRequestReviewThread
+}
+
+"""
+Autogenerated input type of AddPullRequestReviewThreadReply
+"""
+input AddPullRequestReviewThreadReplyInput {
+  """
+  The text of the reply.
+  """
+  body: String!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the pending review to which the reply will belong.
+  """
+  pullRequestReviewId: ID @possibleTypes(concreteTypes: ["PullRequestReview"])
+
+  """
+  The Node ID of the thread to which this reply is being written.
+  """
+  pullRequestReviewThreadId: ID! @possibleTypes(concreteTypes: ["PullRequestReviewThread"])
+}
+
+"""
+Autogenerated return type of AddPullRequestReviewThreadReply
+"""
+type AddPullRequestReviewThreadReplyPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The newly created reply.
+  """
+  comment: PullRequestReviewComment
+}
+
+"""
+Autogenerated input type of AddReaction
+"""
+input AddReactionInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The name of the emoji to react with.
+  """
+  content: ReactionContent!
+
+  """
+  The Node ID of the subject to modify.
+  """
+  subjectId: ID!
+    @possibleTypes(
+      concreteTypes: [
+        "CommitComment"
+        "Discussion"
+        "DiscussionComment"
+        "Issue"
+        "IssueComment"
+        "PullRequest"
+        "PullRequestReview"
+        "PullRequestReviewComment"
+        "Release"
+        "TeamDiscussion"
+        "TeamDiscussionComment"
+      ]
+      abstractType: "Reactable"
+    )
+}
+
+"""
+Autogenerated return type of AddReaction
+"""
+type AddReactionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The reaction object.
+  """
+  reaction: Reaction
+
+  """
+  The reaction groups for the subject.
+  """
+  reactionGroups: [ReactionGroup!]
+
+  """
+  The reactable subject.
+  """
+  subject: Reactable
+}
+
+"""
+Autogenerated input type of AddStar
+"""
+input AddStarInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Starrable ID to star.
+  """
+  starrableId: ID! @possibleTypes(concreteTypes: ["Gist", "Repository", "Topic"], abstractType: "Starrable")
+}
+
+"""
+Autogenerated return type of AddStar
+"""
+type AddStarPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The starrable.
+  """
+  starrable: Starrable
+}
+
+"""
+Autogenerated input type of AddUpvote
+"""
+input AddUpvoteInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the discussion or comment to upvote.
+  """
+  subjectId: ID! @possibleTypes(concreteTypes: ["Discussion", "DiscussionComment"], abstractType: "Votable")
+}
+
+"""
+Autogenerated return type of AddUpvote
+"""
+type AddUpvotePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The votable subject.
+  """
+  subject: Votable
+}
+
+"""
+Autogenerated input type of AddVerifiableDomain
+"""
+input AddVerifiableDomainInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The URL of the domain
+  """
+  domain: URI!
+
+  """
+  The ID of the owner to add the domain to
+  """
+  ownerId: ID! @possibleTypes(concreteTypes: ["Enterprise", "Organization"], abstractType: "VerifiableDomainOwner")
+}
+
+"""
+Autogenerated return type of AddVerifiableDomain
+"""
+type AddVerifiableDomainPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The verifiable domain that was added.
+  """
+  domain: VerifiableDomain
+}
+
+"""
+Represents an 'added_to_merge_queue' event on a given pull request.
+"""
+type AddedToMergeQueueEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The user who added this Pull Request to the merge queue
+  """
+  enqueuer: User
+
+  """
+  The Node ID of the AddedToMergeQueueEvent object
+  """
+  id: ID!
+
+  """
+  The merge queue that this pull request was added to.
+  """
+  mergeQueue: MergeQueue
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest
+}
+
+"""
+Represents an 'added_to_project' event on a given issue or pull request.
+"""
+type AddedToProjectEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the AddedToProjectEvent object
+  """
+  id: ID!
+
+  """
+  Project referenced by event.
+  """
+  project: Project @preview(toggledBy: "starfox-preview")
+
+  """
+  Project card referenced by this project event.
+  """
+  projectCard: ProjectCard @preview(toggledBy: "starfox-preview")
+
+  """
+  Column name referenced by this project event.
+  """
+  projectColumnName: String! @preview(toggledBy: "starfox-preview")
+}
+
+"""
+Represents an announcement banner.
+"""
+interface AnnouncementBanner {
+  """
+  The text of the announcement
+  """
+  announcement: String
+
+  """
+  The expiration date of the announcement, if any
+  """
+  announcementExpiresAt: DateTime
+
+  """
+  Whether the announcement can be dismissed by the user
+  """
+  announcementUserDismissible: Boolean
+}
+
+"""
+A GitHub App.
+"""
+type App implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The description of the app.
+  """
+  description: String
+
+  """
+  The Node ID of the App object
+  """
+  id: ID!
+
+  """
+  The IP addresses of the app.
+  """
+  ipAllowListEntries(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for IP allow list entries returned.
+    """
+    orderBy: IpAllowListEntryOrder = {field: ALLOW_LIST_VALUE, direction: ASC}
+  ): IpAllowListEntryConnection!
+
+  """
+  The hex color code, without the leading '#', for the logo background.
+  """
+  logoBackgroundColor: String!
+
+  """
+  A URL pointing to the app's logo.
+  """
+  logoUrl(
+    """
+    The size of the resulting image.
+    """
+    size: Int
+  ): URI!
+
+  """
+  The name of the app.
+  """
+  name: String!
+
+  """
+  A slug based on the name of the app for use in URLs.
+  """
+  slug: String!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The URL to the app's homepage.
+  """
+  url: URI!
+}
+
+"""
+Autogenerated input type of ApproveDeployments
+"""
+input ApproveDeploymentsInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Optional comment for approving deployments
+  """
+  comment: String = ""
+
+  """
+  The ids of environments to approve deployments
+  """
+  environmentIds: [ID!]!
+
+  """
+  The node ID of the workflow run containing the pending deployments.
+  """
+  workflowRunId: ID! @possibleTypes(concreteTypes: ["WorkflowRun"])
+}
+
+"""
+Autogenerated return type of ApproveDeployments
+"""
+type ApproveDeploymentsPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The affected deployments.
+  """
+  deployments: [Deployment!]
+}
+
+"""
+Autogenerated input type of ApproveVerifiableDomain
+"""
+input ApproveVerifiableDomainInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the verifiable domain to approve.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["VerifiableDomain"])
+}
+
+"""
+Autogenerated return type of ApproveVerifiableDomain
+"""
+type ApproveVerifiableDomainPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The verifiable domain that was approved.
+  """
+  domain: VerifiableDomain
+}
+
+"""
+Autogenerated input type of ArchiveProjectV2Item
+"""
+input ArchiveProjectV2ItemInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the ProjectV2Item to archive.
+  """
+  itemId: ID! @possibleTypes(concreteTypes: ["ProjectV2Item"])
+
+  """
+  The ID of the Project to archive the item from.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+}
+
+"""
+Autogenerated return type of ArchiveProjectV2Item
+"""
+type ArchiveProjectV2ItemPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The item archived from the project.
+  """
+  item: ProjectV2Item
+}
+
+"""
+Autogenerated input type of ArchiveRepository
+"""
+input ArchiveRepositoryInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the repository to mark as archived.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of ArchiveRepository
+"""
+type ArchiveRepositoryPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The repository that was marked as archived.
+  """
+  repository: Repository
+}
+
+"""
+An object that can have users assigned to it.
+"""
+interface Assignable {
+  """
+  A list of Users assigned to this object.
+  """
+  assignees(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserConnection!
+}
+
+"""
+Represents an 'assigned' event on any assignable object.
+"""
+type AssignedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the assignable associated with the event.
+  """
+  assignable: Assignable!
+
+  """
+  Identifies the user or mannequin that was assigned.
+  """
+  assignee: Assignee
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the AssignedEvent object
+  """
+  id: ID!
+
+  """
+  Identifies the user who was assigned.
+  """
+  user: User
+    @deprecated(reason: "Assignees can now be mannequins. Use the `assignee` field instead. Removal on 2020-01-01 UTC.")
+}
+
+"""
+Types that can be assigned to issues.
+"""
+union Assignee = Bot | Mannequin | Organization | User
+
+"""
+An entry in the audit log.
+"""
+interface AuditEntry {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Types that can initiate an audit log event.
+"""
+union AuditEntryActor = Bot | Organization | User
+
+"""
+Ordering options for Audit Log connections.
+"""
+input AuditLogOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection
+
+  """
+  The field to order Audit Logs by.
+  """
+  field: AuditLogOrderField
+}
+
+"""
+Properties by which Audit Log connections can be ordered.
+"""
+enum AuditLogOrderField {
+  """
+  Order audit log entries by timestamp
+  """
+  CREATED_AT
+}
+
+"""
+Represents an 'auto_merge_disabled' event on a given pull request.
+"""
+type AutoMergeDisabledEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The user who disabled auto-merge for this Pull Request
+  """
+  disabler: User
+
+  """
+  The Node ID of the AutoMergeDisabledEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event
+  """
+  pullRequest: PullRequest
+
+  """
+  The reason auto-merge was disabled
+  """
+  reason: String
+
+  """
+  The reason_code relating to why auto-merge was disabled
+  """
+  reasonCode: String
+}
+
+"""
+Represents an 'auto_merge_enabled' event on a given pull request.
+"""
+type AutoMergeEnabledEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The user who enabled auto-merge for this Pull Request
+  """
+  enabler: User
+
+  """
+  The Node ID of the AutoMergeEnabledEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest
+}
+
+"""
+Represents an auto-merge request for a pull request
+"""
+type AutoMergeRequest {
+  """
+  The email address of the author of this auto-merge request.
+  """
+  authorEmail: String
+
+  """
+  The commit message of the auto-merge request. If a merge queue is required by
+  the base branch, this value will be set by the merge queue when merging.
+  """
+  commitBody: String
+
+  """
+  The commit title of the auto-merge request. If a merge queue is required by
+  the base branch, this value will be set by the merge queue when merging.
+  """
+  commitHeadline: String
+
+  """
+  When this auto-merge request was enabled.
+  """
+  enabledAt: DateTime
+
+  """
+  The actor who created the auto-merge request.
+  """
+  enabledBy: Actor
+
+  """
+  The merge method of the auto-merge request. If a merge queue is required by
+  the base branch, this value will be set by the merge queue when merging.
+  """
+  mergeMethod: PullRequestMergeMethod!
+
+  """
+  The pull request that this auto-merge request is set against.
+  """
+  pullRequest: PullRequest!
+}
+
+"""
+Represents an 'auto_rebase_enabled' event on a given pull request.
+"""
+type AutoRebaseEnabledEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The user who enabled auto-merge (rebase) for this Pull Request
+  """
+  enabler: User
+
+  """
+  The Node ID of the AutoRebaseEnabledEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest
+}
+
+"""
+Represents an 'auto_squash_enabled' event on a given pull request.
+"""
+type AutoSquashEnabledEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The user who enabled auto-merge (squash) for this Pull Request
+  """
+  enabler: User
+
+  """
+  The Node ID of the AutoSquashEnabledEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest
+}
+
+"""
+Represents an 'automatic_base_change_failed' event on a given pull request.
+"""
+type AutomaticBaseChangeFailedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the AutomaticBaseChangeFailedEvent object
+  """
+  id: ID!
+
+  """
+  The new base for this PR
+  """
+  newBase: String!
+
+  """
+  The old base for this PR
+  """
+  oldBase: String!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+}
+
+"""
+Represents an 'automatic_base_change_succeeded' event on a given pull request.
+"""
+type AutomaticBaseChangeSucceededEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the AutomaticBaseChangeSucceededEvent object
+  """
+  id: ID!
+
+  """
+  The new base for this PR
+  """
+  newBase: String!
+
+  """
+  The old base for this PR
+  """
+  oldBase: String!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+}
+
+"""
+A (potentially binary) string encoded using base64.
+"""
+scalar Base64String
+
+"""
+Represents a 'base_ref_changed' event on a given issue or pull request.
+"""
+type BaseRefChangedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the name of the base ref for the pull request after it was changed.
+  """
+  currentRefName: String!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the BaseRefChangedEvent object
+  """
+  id: ID!
+
+  """
+  Identifies the name of the base ref for the pull request before it was changed.
+  """
+  previousRefName: String!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+}
+
+"""
+Represents a 'base_ref_deleted' event on a given pull request.
+"""
+type BaseRefDeletedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the name of the Ref associated with the `base_ref_deleted` event.
+  """
+  baseRefName: String
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the BaseRefDeletedEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest
+}
+
+"""
+Represents a 'base_ref_force_pushed' event on a given pull request.
+"""
+type BaseRefForcePushedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the after commit SHA for the 'base_ref_force_pushed' event.
+  """
+  afterCommit: Commit
+
+  """
+  Identifies the before commit SHA for the 'base_ref_force_pushed' event.
+  """
+  beforeCommit: Commit
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the BaseRefForcePushedEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+
+  """
+  Identifies the fully qualified ref name for the 'base_ref_force_pushed' event.
+  """
+  ref: Ref
+}
+
+"""
+Represents non-fractional signed whole numeric values. Since the value may
+exceed the size of a 32-bit integer, it's encoded as a string.
+"""
+scalar BigInt
+
+"""
+Represents a Git blame.
+"""
+type Blame {
+  """
+  The list of ranges from a Git blame.
+  """
+  ranges: [BlameRange!]!
+}
+
+"""
+Represents a range of information from a Git blame.
+"""
+type BlameRange {
+  """
+  Identifies the recency of the change, from 1 (new) to 10 (old). This is
+  calculated as a 2-quantile and measures the distance between the median
+  age of all the changes in the file and the recency of the current
+  range's change.
+  """
+  age: Int!
+
+  """
+  Identifies the line author
+  """
+  commit: Commit!
+
+  """
+  The ending line for the range
+  """
+  endingLine: Int!
+
+  """
+  The starting line for the range
+  """
+  startingLine: Int!
+}
+
+"""
+Represents a Git blob.
+"""
+type Blob implements GitObject & Node {
+  """
+  An abbreviated version of the Git object ID
+  """
+  abbreviatedOid: String!
+
+  """
+  Byte size of Blob object
+  """
+  byteSize: Int!
+
+  """
+  The HTTP path for this Git object
+  """
+  commitResourcePath: URI!
+
+  """
+  The HTTP URL for this Git object
+  """
+  commitUrl: URI!
+
+  """
+  The Node ID of the Blob object
+  """
+  id: ID!
+
+  """
+  Indicates whether the Blob is binary or text. Returns null if unable to determine the encoding.
+  """
+  isBinary: Boolean
+
+  """
+  Indicates whether the contents are truncated
+  """
+  isTruncated: Boolean!
+
+  """
+  The Git object ID
+  """
+  oid: GitObjectID!
+
+  """
+  The Repository the Git object belongs to
+  """
+  repository: Repository!
+
+  """
+  UTF8 text data or null if the Blob is binary
+  """
+  text: String
+}
+
+"""
+A special type of user which takes actions on behalf of GitHub Apps.
+"""
+type Bot implements Actor & Node & UniformResourceLocatable {
+  """
+  A URL pointing to the GitHub App's public avatar.
+  """
+  avatarUrl(
+    """
+    The size of the resulting square image.
+    """
+    size: Int
+  ): URI!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the Bot object
+  """
+  id: ID!
+
+  """
+  The username of the actor.
+  """
+  login: String!
+
+  """
+  The HTTP path for this bot
+  """
+  resourcePath: URI!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this bot
+  """
+  url: URI!
+}
+
+"""
+Types which can be actors for `BranchActorAllowance` objects.
+"""
+union BranchActorAllowanceActor = App | Team | User
+
+"""
+Parameters to be used for the branch_name_pattern rule
+"""
+type BranchNamePatternParameters {
+  """
+  How this rule will appear to users.
+  """
+  name: String
+
+  """
+  If true, the rule will fail if the pattern matches.
+  """
+  negate: Boolean!
+
+  """
+  The operator to use for matching.
+  """
+  operator: String!
+
+  """
+  The pattern to match with.
+  """
+  pattern: String!
+}
+
+"""
+Parameters to be used for the branch_name_pattern rule
+"""
+input BranchNamePatternParametersInput {
+  """
+  How this rule will appear to users.
+  """
+  name: String
+
+  """
+  If true, the rule will fail if the pattern matches.
+  """
+  negate: Boolean
+
+  """
+  The operator to use for matching.
+  """
+  operator: String!
+
+  """
+  The pattern to match with.
+  """
+  pattern: String!
+}
+
+"""
+A branch protection rule.
+"""
+type BranchProtectionRule implements Node {
+  """
+  Can this branch be deleted.
+  """
+  allowsDeletions: Boolean!
+
+  """
+  Are force pushes allowed on this branch.
+  """
+  allowsForcePushes: Boolean!
+
+  """
+  Is branch creation a protected operation.
+  """
+  blocksCreations: Boolean!
+
+  """
+  A list of conflicts between this branch protection rule and other branch protection rules
+  """
+  branchProtectionRuleConflicts(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): BranchProtectionRuleConflictConnection!
+
+  """
+  A list of actors able to force push for this branch protection rule.
+  """
+  bypassForcePushAllowances(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): BypassForcePushAllowanceConnection!
+
+  """
+  A list of actors able to bypass PRs for this branch protection rule.
+  """
+  bypassPullRequestAllowances(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): BypassPullRequestAllowanceConnection!
+
+  """
+  The actor who created this branch protection rule.
+  """
+  creator: Actor
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  Will new commits pushed to matching branches dismiss pull request review approvals.
+  """
+  dismissesStaleReviews: Boolean!
+
+  """
+  The Node ID of the BranchProtectionRule object
+  """
+  id: ID!
+
+  """
+  Can admins overwrite branch protection.
+  """
+  isAdminEnforced: Boolean!
+
+  """
+  Whether users can pull changes from upstream when the branch is locked. Set to
+  `true` to allow fork syncing. Set to `false` to prevent fork syncing.
+  """
+  lockAllowsFetchAndMerge: Boolean!
+
+  """
+  Whether to set the branch as read-only. If this is true, users will not be able to push to the branch.
+  """
+  lockBranch: Boolean!
+
+  """
+  Repository refs that are protected by this rule
+  """
+  matchingRefs(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filters refs with query on name
+    """
+    query: String
+  ): RefConnection!
+
+  """
+  Identifies the protection rule pattern.
+  """
+  pattern: String!
+
+  """
+  A list of push allowances for this branch protection rule.
+  """
+  pushAllowances(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): PushAllowanceConnection!
+
+  """
+  The repository associated with this branch protection rule.
+  """
+  repository: Repository
+
+  """
+  Whether the most recent push must be approved by someone other than the person who pushed it
+  """
+  requireLastPushApproval: Boolean!
+
+  """
+  Number of approving reviews required to update matching branches.
+  """
+  requiredApprovingReviewCount: Int
+
+  """
+  List of required deployment environments that must be deployed successfully to update matching branches
+  """
+  requiredDeploymentEnvironments: [String]
+
+  """
+  List of required status check contexts that must pass for commits to be accepted to matching branches.
+  """
+  requiredStatusCheckContexts: [String]
+
+  """
+  List of required status checks that must pass for commits to be accepted to matching branches.
+  """
+  requiredStatusChecks: [RequiredStatusCheckDescription!]
+
+  """
+  Are approving reviews required to update matching branches.
+  """
+  requiresApprovingReviews: Boolean!
+
+  """
+  Are reviews from code owners required to update matching branches.
+  """
+  requiresCodeOwnerReviews: Boolean!
+
+  """
+  Are commits required to be signed.
+  """
+  requiresCommitSignatures: Boolean!
+
+  """
+  Are conversations required to be resolved before merging.
+  """
+  requiresConversationResolution: Boolean!
+
+  """
+  Does this branch require deployment to specific environments before merging
+  """
+  requiresDeployments: Boolean!
+
+  """
+  Are merge commits prohibited from being pushed to this branch.
+  """
+  requiresLinearHistory: Boolean!
+
+  """
+  Are status checks required to update matching branches.
+  """
+  requiresStatusChecks: Boolean!
+
+  """
+  Are branches required to be up to date before merging.
+  """
+  requiresStrictStatusChecks: Boolean!
+
+  """
+  Is pushing to matching branches restricted.
+  """
+  restrictsPushes: Boolean!
+
+  """
+  Is dismissal of pull request reviews restricted.
+  """
+  restrictsReviewDismissals: Boolean!
+
+  """
+  A list of review dismissal allowances for this branch protection rule.
+  """
+  reviewDismissalAllowances(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ReviewDismissalAllowanceConnection!
+}
+
+"""
+A conflict between two branch protection rules.
+"""
+type BranchProtectionRuleConflict {
+  """
+  Identifies the branch protection rule.
+  """
+  branchProtectionRule: BranchProtectionRule
+
+  """
+  Identifies the conflicting branch protection rule.
+  """
+  conflictingBranchProtectionRule: BranchProtectionRule
+
+  """
+  Identifies the branch ref that has conflicting rules
+  """
+  ref: Ref
+}
+
+"""
+The connection type for BranchProtectionRuleConflict.
+"""
+type BranchProtectionRuleConflictConnection {
+  """
+  A list of edges.
+  """
+  edges: [BranchProtectionRuleConflictEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [BranchProtectionRuleConflict]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type BranchProtectionRuleConflictEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: BranchProtectionRuleConflict
+}
+
+"""
+The connection type for BranchProtectionRule.
+"""
+type BranchProtectionRuleConnection {
+  """
+  A list of edges.
+  """
+  edges: [BranchProtectionRuleEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [BranchProtectionRule]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type BranchProtectionRuleEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: BranchProtectionRule
+}
+
+"""
+Information about a sponsorship to make for a user or organization with a GitHub
+Sponsors profile, as part of sponsoring many users or organizations at once.
+"""
+input BulkSponsorship {
+  """
+  The amount to pay to the sponsorable in US dollars. Valid values: 1-12000.
+  """
+  amount: Int!
+
+  """
+  The ID of the user or organization who is receiving the sponsorship. Required if sponsorableLogin is not given.
+  """
+  sponsorableId: ID @possibleTypes(concreteTypes: ["Organization", "User"], abstractType: "Sponsorable")
+
+  """
+  The username of the user or organization who is receiving the sponsorship. Required if sponsorableId is not given.
+  """
+  sponsorableLogin: String
+}
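+
+# Illustrative literal for this input, as it might appear in the arguments of
+# a bulk-sponsoring mutation; per the field docs above, exactly one of
+# `sponsorableId` / `sponsorableLogin` should be set.
+#
+#   {amount: 5, sponsorableLogin: "octocat"}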
+
+"""
+Types that can represent a repository ruleset bypass actor.
+"""
+union BypassActor = App | Team
+
+"""
+A user, team, or app who has the ability to bypass a force push requirement on a protected branch.
+"""
+type BypassForcePushAllowance implements Node {
+  """
+  The actor that can force push.
+  """
+  actor: BranchActorAllowanceActor
+
+  """
+  Identifies the branch protection rule associated with the allowed user, team, or app.
+  """
+  branchProtectionRule: BranchProtectionRule
+
+  """
+  The Node ID of the BypassForcePushAllowance object
+  """
+  id: ID!
+}
+
+"""
+The connection type for BypassForcePushAllowance.
+"""
+type BypassForcePushAllowanceConnection {
+  """
+  A list of edges.
+  """
+  edges: [BypassForcePushAllowanceEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [BypassForcePushAllowance]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type BypassForcePushAllowanceEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: BypassForcePushAllowance
+}
+
+"""
+A user, team, or app who has the ability to bypass a pull request requirement on a protected branch.
+"""
+type BypassPullRequestAllowance implements Node {
+  """
+  The actor that can bypass.
+  """
+  actor: BranchActorAllowanceActor
+
+  """
+  Identifies the branch protection rule associated with the allowed user, team, or app.
+  """
+  branchProtectionRule: BranchProtectionRule
+
+  """
+  The Node ID of the BypassPullRequestAllowance object
+  """
+  id: ID!
+}
+
+"""
+The connection type for BypassPullRequestAllowance.
+"""
+type BypassPullRequestAllowanceConnection {
+  """
+  A list of edges.
+  """
+  edges: [BypassPullRequestAllowanceEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [BypassPullRequestAllowance]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type BypassPullRequestAllowanceEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: BypassPullRequestAllowance
+}
+
+"""
+The Common Vulnerability Scoring System
+"""
+type CVSS {
+  """
+  The CVSS score associated with this advisory
+  """
+  score: Float!
+
+  """
+  The CVSS vector string associated with this advisory
+  """
+  vectorString: String
+}
+
+"""
+A common weakness enumeration
+"""
+type CWE implements Node {
+  """
+  The id of the CWE
+  """
+  cweId: String!
+
+  """
+  A detailed description of this CWE
+  """
+  description: String!
+
+  """
+  The Node ID of the CWE object
+  """
+  id: ID!
+
+  """
+  The name of this CWE
+  """
+  name: String!
+}
+
+"""
+The connection type for CWE.
+"""
+type CWEConnection {
+  """
+  A list of edges.
+  """
+  edges: [CWEEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [CWE]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type CWEEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: CWE
+}
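+
+# Illustrative query: reading an advisory's CVSS score and CWEs. Assumes the
+# `securityAdvisory(ghsaId:)` query field and the `cvss` / `cwes` fields on
+# `SecurityAdvisory`, all defined elsewhere in this schema.
+#
+#   query {
+#     securityAdvisory(ghsaId: "GHSA-xxxx-xxxx-xxxx") {
+#       cvss { score vectorString }
+#       cwes(first: 5) { nodes { cweId name } }
+#     }
+#   }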
+
+"""
+Autogenerated input type of CancelEnterpriseAdminInvitation
+"""
+input CancelEnterpriseAdminInvitationInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the pending enterprise administrator invitation.
+  """
+  invitationId: ID! @possibleTypes(concreteTypes: ["EnterpriseAdministratorInvitation"])
+}
+
+"""
+Autogenerated return type of CancelEnterpriseAdminInvitation
+"""
+type CancelEnterpriseAdminInvitationPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The invitation that was canceled.
+  """
+  invitation: EnterpriseAdministratorInvitation
+
+  """
+  A message confirming the result of canceling an administrator invitation.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of CancelSponsorship
+"""
+input CancelSponsorshipInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the user or organization who is acting as the sponsor, paying for
+  the sponsorship. Required if sponsorLogin is not given.
+  """
+  sponsorId: ID @possibleTypes(concreteTypes: ["Organization", "User"], abstractType: "Sponsor")
+
+  """
+  The username of the user or organization who is acting as the sponsor, paying
+  for the sponsorship. Required if sponsorId is not given.
+  """
+  sponsorLogin: String
+
+  """
+  The ID of the user or organization who is receiving the sponsorship. Required if sponsorableLogin is not given.
+  """
+  sponsorableId: ID @possibleTypes(concreteTypes: ["Organization", "User"], abstractType: "Sponsorable")
+
+  """
+  The username of the user or organization who is receiving the sponsorship. Required if sponsorableId is not given.
+  """
+  sponsorableLogin: String
+}
+
+"""
+Autogenerated return type of CancelSponsorship
+"""
+type CancelSponsorshipPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The tier that was being used at the time of cancellation.
+  """
+  sponsorsTier: SponsorsTier
+}
+
+"""
+Autogenerated input type of ChangeUserStatus
+"""
+input ChangeUserStatusInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The emoji to represent your status. Can either be a native Unicode emoji or an emoji name with colons, e.g., :grinning:.
+  """
+  emoji: String
+
+  """
+  If set, the user status will not be shown after this date.
+  """
+  expiresAt: DateTime
+
+  """
+  Whether this status should indicate you are not fully available on GitHub, e.g., you are away.
+  """
+  limitedAvailability: Boolean = false
+
+  """
+  A short description of your current status.
+  """
+  message: String
+
+  """
+  The ID of the organization whose members will be allowed to see the status. If
+  omitted, the status will be publicly visible.
+  """
+  organizationId: ID @possibleTypes(concreteTypes: ["Organization"])
+}
+
+"""
+Autogenerated return type of ChangeUserStatus
+"""
+type ChangeUserStatusPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Your updated status.
+  """
+  status: UserStatus
+}
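+
+# Illustrative mutation, assuming the field name `changeUserStatus` follows
+# the input/payload naming convention used throughout this schema.
+#
+#   mutation {
+#     changeUserStatus(input: {
+#       emoji: ":palm_tree:"
+#       message: "On vacation"
+#       limitedAvailability: true
+#     }) {
+#       status { __typename }
+#     }
+#   }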
+
+"""
+A single check annotation.
+"""
+type CheckAnnotation {
+  """
+  The annotation's severity level.
+  """
+  annotationLevel: CheckAnnotationLevel
+
+  """
+  The path to the file that this annotation was made on.
+  """
+  blobUrl: URI!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The position of this annotation.
+  """
+  location: CheckAnnotationSpan!
+
+  """
+  The annotation's message.
+  """
+  message: String!
+
+  """
+  The path that this annotation was made on.
+  """
+  path: String!
+
+  """
+  Additional information about the annotation.
+  """
+  rawDetails: String
+
+  """
+  The annotation's title.
+  """
+  title: String
+}
+
+"""
+The connection type for CheckAnnotation.
+"""
+type CheckAnnotationConnection {
+  """
+  A list of edges.
+  """
+  edges: [CheckAnnotationEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [CheckAnnotation]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+Information from a check run analysis to specific lines of code.
+"""
+input CheckAnnotationData {
+  """
+  Represents an annotation's information level.
+  """
+  annotationLevel: CheckAnnotationLevel!
+
+  """
+  The location of the annotation.
+  """
+  location: CheckAnnotationRange!
+
+  """
+  A short description of the feedback for these lines of code.
+  """
+  message: String!
+
+  """
+  The path of the file to add an annotation to.
+  """
+  path: String!
+
+  """
+  Details about this annotation.
+  """
+  rawDetails: String
+
+  """
+  The title that represents the annotation.
+  """
+  title: String
+}
+
+"""
+An edge in a connection.
+"""
+type CheckAnnotationEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: CheckAnnotation
+}
+
+"""
+Represents an annotation's information level.
+"""
+enum CheckAnnotationLevel {
+  """
+  An annotation indicating an inescapable error.
+  """
+  FAILURE
+
+  """
+  An annotation indicating some information.
+  """
+  NOTICE
+
+  """
+  An annotation indicating an ignorable error.
+  """
+  WARNING
+}
+
+"""
+A character position in a check annotation.
+"""
+type CheckAnnotationPosition {
+  """
+  Column number (1 indexed).
+  """
+  column: Int
+
+  """
+  Line number (1 indexed).
+  """
+  line: Int!
+}
+
+"""
+A range of lines and columns within a file that a check run annotation refers to.
+"""
+input CheckAnnotationRange {
+  """
+  The ending column of the range.
+  """
+  endColumn: Int
+
+  """
+  The ending line of the range.
+  """
+  endLine: Int!
+
+  """
+  The starting column of the range.
+  """
+  startColumn: Int
+
+  """
+  The starting line of the range.
+  """
+  startLine: Int!
+}
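+
+# Illustrative `CheckAnnotationData` literal combining the two inputs above,
+# as it might be passed when creating a check run.
+#
+#   {
+#     path: "src/main.py"
+#     location: {startLine: 10, endLine: 12}
+#     annotationLevel: WARNING
+#     message: "Possible unused variable"
+#   }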
+
+"""
+An inclusive pair of positions for a check annotation.
+"""
+type CheckAnnotationSpan {
+  """
+  End position (inclusive).
+  """
+  end: CheckAnnotationPosition!
+
+  """
+  Start position (inclusive).
+  """
+  start: CheckAnnotationPosition!
+}
+
+"""
+The possible states for a check suite or run conclusion.
+"""
+enum CheckConclusionState {
+  """
+  The check suite or run requires action.
+  """
+  ACTION_REQUIRED
+
+  """
+  The check suite or run has been cancelled.
+  """
+  CANCELLED
+
+  """
+  The check suite or run has failed.
+  """
+  FAILURE
+
+  """
+  The check suite or run was neutral.
+  """
+  NEUTRAL
+
+  """
+  The check suite or run was skipped.
+  """
+  SKIPPED
+
+  """
+  The check suite or run was marked stale by GitHub. Only GitHub can use this conclusion.
+  """
+  STALE
+
+  """
+  The check suite or run has failed at startup.
+  """
+  STARTUP_FAILURE
+
+  """
+  The check suite or run has succeeded.
+  """
+  SUCCESS
+
+  """
+  The check suite or run has timed out.
+  """
+  TIMED_OUT
+}
+
+"""
+A check run.
+"""
+type CheckRun implements Node & RequirableByPullRequest & UniformResourceLocatable {
+  """
+  The check run's annotations.
+  """
+  annotations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): CheckAnnotationConnection
+
+  """
+  The check suite that this run is a part of.
+  """
+  checkSuite: CheckSuite!
+
+  """
+  Identifies the date and time when the check run was completed.
+  """
+  completedAt: DateTime
+
+  """
+  The conclusion of the check run.
+  """
+  conclusion: CheckConclusionState
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The corresponding deployment for this job, if any.
+  """
+  deployment: Deployment
+
+  """
+  The URL from which to find full details of the check run on the integrator's site.
+  """
+  detailsUrl: URI
+
+  """
+  A reference for the check run on the integrator's system.
+  """
+  externalId: String
+
+  """
+  The Node ID of the CheckRun object
+  """
+  id: ID!
+
+  """
+  Whether this is required to pass before merging for a specific pull request.
+  """
+  isRequired(
+    """
+    The id of the pull request this is required for.
+    """
+    pullRequestId: ID
+
+    """
+    The number of the pull request this is required for.
+    """
+    pullRequestNumber: Int
+  ): Boolean!
+
+  """
+  The name of the check for this check run.
+  """
+  name: String!
+
+  """
+  Information about a pending deployment, if any, in this check run.
+  """
+  pendingDeploymentRequest: DeploymentRequest
+
+  """
+  The permalink to the check run summary.
+  """
+  permalink: URI!
+
+  """
+  The repository associated with this check run.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path for this check run.
+  """
+  resourcePath: URI!
+
+  """
+  Identifies the date and time when the check run was started.
+  """
+  startedAt: DateTime
+
+  """
+  The current status of the check run.
+  """
+  status: CheckStatusState!
+
+  """
+  The check run's steps.
+  """
+  steps(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Step number.
+    """
+    number: Int
+  ): CheckStepConnection
+
+  """
+  A string representing the check run's summary.
+  """
+  summary: String
+
+  """
+  A string representing the check run's text.
+  """
+  text: String
+
+  """
+  A string representing the check run's title.
+  """
+  title: String
+
+  """
+  The HTTP URL for this check run.
+  """
+  url: URI!
+}
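+
+# Illustrative query: fetching a check run's status and steps through the
+# `node` lookup (CheckRun implements Node; the `node` query field is defined
+# elsewhere in this schema).
+#
+#   query($id: ID!) {
+#     node(id: $id) {
+#       ... on CheckRun {
+#         name
+#         status
+#         conclusion
+#         steps(first: 20) {
+#           nodes { number name status conclusion }
+#         }
+#       }
+#     }
+#   }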
+
+"""
+Possible further actions the integrator can perform.
+"""
+input CheckRunAction {
+  """
+  A short explanation of what this action would do.
+  """
+  description: String!
+
+  """
+  A reference for the action on the integrator's system.
+  """
+  identifier: String!
+
+  """
+  The text to be displayed on a button in the web UI.
+  """
+  label: String!
+}
+
+"""
+The connection type for CheckRun.
+"""
+type CheckRunConnection {
+  """
+  A list of edges.
+  """
+  edges: [CheckRunEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [CheckRun]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type CheckRunEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: CheckRun
+}
+
+"""
+The filters that are available when fetching check runs.
+"""
+input CheckRunFilter {
+  """
+  Filters the check runs created by this application ID.
+  """
+  appId: Int
+
+  """
+  Filters the check runs by this name.
+  """
+  checkName: String
+
+  """
+  Filters the check runs by this type.
+  """
+  checkType: CheckRunType
+
+  """
+  Filters the check runs by these conclusions.
+  """
+  conclusions: [CheckConclusionState!]
+
+  """
+  Filters the check runs by this status. Superseded by statuses.
+  """
+  status: CheckStatusState
+
+  """
+  Filters the check runs by these statuses. Overrides `status`.
+  """
+  statuses: [CheckStatusState!]
+}
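+
+# Illustrative filter value: only the latest run per check, restricted to
+# runs that ended in failure or timeout.
+#
+#   {checkType: LATEST, conclusions: [FAILURE, TIMED_OUT]}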
+
+"""
+Descriptive details about the check run.
+"""
+input CheckRunOutput {
+  """
+  The annotations that are made as part of the check run.
+  """
+  annotations: [CheckAnnotationData!]
+
+  """
+  Images attached to the check run output displayed in the GitHub pull request UI.
+  """
+  images: [CheckRunOutputImage!]
+
+  """
+  The summary of the check run (supports CommonMark).
+  """
+  summary: String!
+
+  """
+  The details of the check run (supports CommonMark).
+  """
+  text: String
+
+  """
+  A title to provide for this check run.
+  """
+  title: String!
+}
+
+"""
+Images attached to the check run output displayed in the GitHub pull request UI.
+"""
+input CheckRunOutputImage {
+  """
+  The alternative text for the image.
+  """
+  alt: String!
+
+  """
+  A short image description.
+  """
+  caption: String
+
+  """
+  The full URL of the image.
+  """
+  imageUrl: URI!
+}
+
+"""
+The possible states of a check run in a status rollup.
+"""
+enum CheckRunState {
+  """
+  The check run requires action.
+  """
+  ACTION_REQUIRED
+
+  """
+  The check run has been cancelled.
+  """
+  CANCELLED
+
+  """
+  The check run has been completed.
+  """
+  COMPLETED
+
+  """
+  The check run has failed.
+  """
+  FAILURE
+
+  """
+  The check run is in progress.
+  """
+  IN_PROGRESS
+
+  """
+  The check run was neutral.
+  """
+  NEUTRAL
+
+  """
+  The check run is in a pending state.
+  """
+  PENDING
+
+  """
+  The check run has been queued.
+  """
+  QUEUED
+
+  """
+  The check run was skipped.
+  """
+  SKIPPED
+
+  """
+  The check run was marked stale by GitHub. Only GitHub can use this conclusion.
+  """
+  STALE
+
+  """
+  The check run has failed at startup.
+  """
+  STARTUP_FAILURE
+
+  """
+  The check run has succeeded.
+  """
+  SUCCESS
+
+  """
+  The check run has timed out.
+  """
+  TIMED_OUT
+
+  """
+  The check run is in a waiting state.
+  """
+  WAITING
+}
+
+"""
+Represents a count of the state of a check run.
+"""
+type CheckRunStateCount {
+  """
+  The number of check runs with this state.
+  """
+  count: Int!
+
+  """
+  The state of a check run.
+  """
+  state: CheckRunState!
+}
+
+"""
+The possible types of check runs.
+"""
+enum CheckRunType {
+  """
+  Every check run available.
+  """
+  ALL
+
+  """
+  The latest check run.
+  """
+  LATEST
+}
+
+"""
+The possible states for a check suite or run status.
+"""
+enum CheckStatusState {
+  """
+  The check suite or run has been completed.
+  """
+  COMPLETED
+
+  """
+  The check suite or run is in progress.
+  """
+  IN_PROGRESS
+
+  """
+  The check suite or run is in a pending state.
+  """
+  PENDING
+
+  """
+  The check suite or run has been queued.
+  """
+  QUEUED
+
+  """
+  The check suite or run has been requested.
+  """
+  REQUESTED
+
+  """
+  The check suite or run is in a waiting state.
+  """
+  WAITING
+}
+
+"""
+A single check step.
+"""
+type CheckStep {
+  """
+  Identifies the date and time when the check step was completed.
+  """
+  completedAt: DateTime
+
+  """
+  The conclusion of the check step.
+  """
+  conclusion: CheckConclusionState
+
+  """
+  A reference for the check step on the integrator's system.
+  """
+  externalId: String
+
+  """
+  The step's name.
+  """
+  name: String!
+
+  """
+  The index of the step in the list of steps of the parent check run.
+  """
+  number: Int!
+
+  """
+  Number of seconds to completion.
+  """
+  secondsToCompletion: Int
+
+  """
+  Identifies the date and time when the check step was started.
+  """
+  startedAt: DateTime
+
+  """
+  The current status of the check step.
+  """
+  status: CheckStatusState!
+}
+
+"""
+The connection type for CheckStep.
+"""
+type CheckStepConnection {
+  """
+  A list of edges.
+  """
+  edges: [CheckStepEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [CheckStep]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type CheckStepEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: CheckStep
+}
+
+"""
+A check suite.
+"""
+type CheckSuite implements Node {
+  """
+  The GitHub App which created this check suite.
+  """
+  app: App
+
+  """
+  The name of the branch for this check suite.
+  """
+  branch: Ref
+
+  """
+  The check runs associated with a check suite.
+  """
+  checkRuns(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Filters the check runs by this type.
+    """
+    filterBy: CheckRunFilter
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): CheckRunConnection
+
+  """
+  The commit for this check suite.
+  """
+  commit: Commit!
+
+  """
+  The conclusion of this check suite.
+  """
+  conclusion: CheckConclusionState
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The user who triggered the check suite.
+  """
+  creator: User
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the CheckSuite object
+  """
+  id: ID!
+
+  """
+  A list of open pull requests matching the check suite.
+  """
+  matchingPullRequests(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    The base ref name to filter the pull requests by.
+    """
+    baseRefName: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    The head ref name to filter the pull requests by.
+    """
+    headRefName: String
+
+    """
+    A list of label names to filter the pull requests by.
+    """
+    labels: [String!]
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for pull requests returned from the connection.
+    """
+    orderBy: IssueOrder
+
+    """
+    A list of states to filter the pull requests by.
+    """
+    states: [PullRequestState!]
+  ): PullRequestConnection
+
+  """
+  The push that triggered this check suite.
+  """
+  push: Push
+
+  """
+  The repository associated with this check suite.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path for this check suite.
+  """
+  resourcePath: URI!
+
+  """
+  The status of this check suite.
+  """
+  status: CheckStatusState!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this check suite.
+  """
+  url: URI!
+
+  """
+  The workflow run associated with this check suite.
+  """
+  workflowRun: WorkflowRun
+}
+
+"""
+The auto-trigger preferences that are available for check suites.
+"""
+input CheckSuiteAutoTriggerPreference {
+  """
+  The node ID of the application that owns the check suite.
+  """
+  appId: ID!
+
+  """
+  Set to `true` to enable automatic creation of CheckSuite events upon pushes to the repository.
+  """
+  setting: Boolean!
+}
+
+"""
+The connection type for CheckSuite.
+"""
+type CheckSuiteConnection {
+  """
+  A list of edges.
+  """
+  edges: [CheckSuiteEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [CheckSuite]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type CheckSuiteEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: CheckSuite
+}
+
+"""
+The filters that are available when fetching check suites.
+"""
+input CheckSuiteFilter {
+  """
+  Filters the check suites created by this application ID.
+  """
+  appId: Int
+
+  """
+  Filters the check suites by this name.
+  """
+  checkName: String
+}
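+
+# Illustrative query combining the two filter inputs above to inspect a
+# commit's check suites. Assumes `Repository.object(expression:)` defined
+# elsewhere in this schema; the app ID is a placeholder.
+#
+#   query {
+#     repository(owner: "octocat", name: "hello-world") {
+#       object(expression: "main") {
+#         ... on Commit {
+#           checkSuites(first: 10, filterBy: {appId: 12345}) {
+#             nodes {
+#               conclusion
+#               checkRuns(first: 50, filterBy: {checkType: LATEST}) {
+#                 nodes { name conclusion }
+#               }
+#             }
+#           }
+#         }
+#       }
+#     }
+#   }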
+
+"""
+An object which can have its data claimed or claim data from another.
+"""
+union Claimable = Mannequin | User
+
+"""
+Autogenerated input type of ClearLabelsFromLabelable
+"""
+input ClearLabelsFromLabelableInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The id of the labelable object to clear the labels from.
+  """
+  labelableId: ID! @possibleTypes(concreteTypes: ["Discussion", "Issue", "PullRequest"], abstractType: "Labelable")
+}
+
+"""
+Autogenerated return type of ClearLabelsFromLabelable
+"""
+type ClearLabelsFromLabelablePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The item that was unlabeled.
+  """
+  labelable: Labelable
+}
+
+"""
+Autogenerated input type of ClearProjectV2ItemFieldValue
+"""
+input ClearProjectV2ItemFieldValueInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the field to be cleared.
+  """
+  fieldId: ID!
+    @possibleTypes(
+      concreteTypes: ["ProjectV2Field", "ProjectV2IterationField", "ProjectV2SingleSelectField"]
+      abstractType: "ProjectV2FieldConfiguration"
+    )
+
+  """
+  The ID of the item to be cleared.
+  """
+  itemId: ID! @possibleTypes(concreteTypes: ["ProjectV2Item"])
+
+  """
+  The ID of the Project.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+}
+
+"""
+Autogenerated return type of ClearProjectV2ItemFieldValue
+"""
+type ClearProjectV2ItemFieldValuePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated item.
+  """
+  projectV2Item: ProjectV2Item
+}
+
+"""
+Autogenerated input type of CloneProject
+"""
+input CloneProjectInput {
+  """
+  The description of the project.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Whether or not to clone the source project's workflows.
+  """
+  includeWorkflows: Boolean!
+
+  """
+  The name of the project.
+  """
+  name: String!
+
+  """
+  Whether the project should be public. Defaults to false (private).
+  """
+  public: Boolean
+
+  """
+  The source project to clone.
+  """
+  sourceId: ID! @possibleTypes(concreteTypes: ["Project"])
+
+  """
+  The owner ID to create the project under.
+  """
+  targetOwnerId: ID! @possibleTypes(concreteTypes: ["Organization", "Repository", "User"], abstractType: "ProjectOwner")
+}
+
+"""
+Autogenerated return type of CloneProject
+"""
+type CloneProjectPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The id of the JobStatus for populating cloned fields.
+  """
+  jobStatusId: String
+
+  """
+  The new cloned project.
+  """
+  project: Project
+}
+
+"""
+Autogenerated input type of CloneTemplateRepository
+"""
+input CloneTemplateRepositoryInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A short description of the new repository.
+  """
+  description: String
+
+  """
+  Whether to copy all branches from the template to the new repository. Defaults
+  to copying only the default branch of the template.
+  """
+  includeAllBranches: Boolean = false
+
+  """
+  The name of the new repository.
+  """
+  name: String!
+
+  """
+  The ID of the owner for the new repository.
+  """
+  ownerId: ID! @possibleTypes(concreteTypes: ["Organization", "User"], abstractType: "RepositoryOwner")
+
+  """
+  The Node ID of the template repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  Indicates the repository's visibility level.
+  """
+  visibility: RepositoryVisibility!
+}
+
+"""
+Autogenerated return type of CloneTemplateRepository
+"""
+type CloneTemplateRepositoryPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new repository.
+  """
+  repository: Repository
+}
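+
+# Illustrative mutation, assuming the field name `cloneTemplateRepository`
+# follows the input/payload naming convention, that `PRIVATE` is a member of
+# the `RepositoryVisibility` enum defined elsewhere, and that `Repository`
+# exposes `url`; the IDs are placeholders.
+#
+#   mutation {
+#     cloneTemplateRepository(input: {
+#       name: "my-service"
+#       ownerId: "OWNER_NODE_ID"
+#       repositoryId: "TEMPLATE_REPO_NODE_ID"
+#       visibility: PRIVATE
+#     }) {
+#       repository { url }
+#     }
+#   }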
+
+"""
+An object that can be closed
+"""
+interface Closable {
+  """
+  Indicates if the object is closed (definition of closed may depend on type).
+  """
+  closed: Boolean!
+
+  """
+  Identifies the date and time when the object was closed.
+  """
+  closedAt: DateTime
+
+  """
+  Indicates if the object can be closed by the viewer.
+  """
+  viewerCanClose: Boolean!
+
+  """
+  Indicates if the object can be reopened by the viewer.
+  """
+  viewerCanReopen: Boolean!
+}
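+
+# Illustrative fragment over the interface above; every selected field is
+# declared on `Closable` itself, so it applies to any closable type.
+#
+#   fragment CloseInfo on Closable {
+#     closed
+#     closedAt
+#     viewerCanClose
+#     viewerCanReopen
+#   }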
+
+"""
+Autogenerated input type of CloseDiscussion
+"""
+input CloseDiscussionInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the discussion to be closed.
+  """
+  discussionId: ID! @possibleTypes(concreteTypes: ["Discussion"])
+
+  """
+  The reason why the discussion is being closed.
+  """
+  reason: DiscussionCloseReason = RESOLVED
+}
+
+"""
+Autogenerated return type of CloseDiscussion
+"""
+type CloseDiscussionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The discussion that was closed.
+  """
+  discussion: Discussion
+}
+
+"""
+Autogenerated input type of CloseIssue
+"""
+input CloseIssueInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the issue to be closed.
+  """
+  issueId: ID! @possibleTypes(concreteTypes: ["Issue"])
+
+  """
+  The reason the issue is to be closed.
+  """
+  stateReason: IssueClosedStateReason
+}
+
+"""
+Autogenerated return type of CloseIssue
+"""
+type CloseIssuePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The issue that was closed.
+  """
+  issue: Issue
+}
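+
+# Illustrative mutation, assuming the field name `closeIssue` follows the
+# input/payload naming convention and that `COMPLETED` is a member of the
+# `IssueClosedStateReason` enum defined elsewhere; the issue ID is a
+# placeholder.
+#
+#   mutation {
+#     closeIssue(input: {issueId: "ISSUE_NODE_ID", stateReason: COMPLETED}) {
+#       issue { closed closedAt }
+#     }
+#   }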
+
+"""
+Autogenerated input type of ClosePullRequest
+"""
+input ClosePullRequestInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the pull request to be closed.
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+}
+
+"""
+Autogenerated return type of ClosePullRequest
+"""
+type ClosePullRequestPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The pull request that was closed.
+  """
+  pullRequest: PullRequest
+}
+
+"""
+Represents a 'closed' event on any `Closable`.
+"""
+type ClosedEvent implements Node & UniformResourceLocatable {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Object that was closed.
+  """
+  closable: Closable!
+
+  """
+  Object which triggered the creation of this event.
+  """
+  closer: Closer
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the ClosedEvent object
+  """
+  id: ID!
+
+  """
+  The HTTP path for this closed event.
+  """
+  resourcePath: URI!
+
+  """
+  The reason the issue state was changed to closed.
+  """
+  stateReason: IssueStateReason
+
+  """
+  The HTTP URL for this closed event.
+  """
+  url: URI!
+}
+
+"""
+The object which triggered a `ClosedEvent`.
+"""
+union Closer = Commit | PullRequest
+
+"""
+The Code of Conduct for a repository
+"""
+type CodeOfConduct implements Node {
+  """
+  The body of the Code of Conduct
+  """
+  body: String
+
+  """
+  The Node ID of the CodeOfConduct object
+  """
+  id: ID!
+
+  """
+  The key for the Code of Conduct
+  """
+  key: String!
+
+  """
+  The formal name of the Code of Conduct
+  """
+  name: String!
+
+  """
+  The HTTP path for this Code of Conduct
+  """
+  resourcePath: URI
+
+  """
+  The HTTP URL for this Code of Conduct
+  """
+  url: URI
+}
+
+"""
+Collaborators' affiliation level with a subject.
+"""
+enum CollaboratorAffiliation {
+  """
+  All collaborators the authenticated user can see.
+  """
+  ALL
+
+  """
+  All collaborators with permissions to an organization-owned subject, regardless of organization membership status.
+  """
+  DIRECT
+
+  """
+  All outside collaborators of an organization-owned subject.
+  """
+  OUTSIDE
+}
+
+"""
+Represents a comment.
+"""
+interface Comment {
+  """
+  The actor who authored the comment.
+  """
+  author: Actor
+
+  """
+  Author's association with the subject of the comment.
+  """
+  authorAssociation: CommentAuthorAssociation!
+
+  """
+  The body as Markdown.
+  """
+  body: String!
+
+  """
+  The body rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  The body rendered to text.
+  """
+  bodyText: String!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Check if this comment was created via an email reply.
+  """
+  createdViaEmail: Boolean!
+
+  """
+  The actor who edited the comment.
+  """
+  editor: Actor
+
+  """
+  The Node ID of the Comment object
+  """
+  id: ID!
+
+  """
+  Check if this comment was edited and includes an edit with the creation data.
+  """
+  includesCreatedEdit: Boolean!
+
+  """
+  The moment the editor made the last edit.
+  """
+  lastEditedAt: DateTime
+
+  """
+  Identifies when the comment was published.
+  """
+  publishedAt: DateTime
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  A list of edits to this content.
+  """
+  userContentEdits(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserContentEditConnection
+
+  """
+  Did the viewer author this comment.
+  """
+  viewerDidAuthor: Boolean!
+}
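+
+# Illustrative fragment over the interface above, assuming the `Actor` type
+# defined elsewhere in this schema exposes `login`.
+#
+#   fragment CommentMeta on Comment {
+#     author { login }
+#     authorAssociation
+#     bodyText
+#     createdAt
+#     lastEditedAt
+#   }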
+
+"""
+A comment author's association with a repository.
+"""
+enum CommentAuthorAssociation {
+  """
+  Author has been invited to collaborate on the repository.
+  """
+  COLLABORATOR
+
+  """
+  Author has previously committed to the repository.
+  """
+  CONTRIBUTOR
+
+  """
+  Author has not previously committed to GitHub.
+  """
+  FIRST_TIMER
+
+  """
+  Author has not previously committed to the repository.
+  """
+  FIRST_TIME_CONTRIBUTOR
+
+  """
+  Author is a placeholder for an unclaimed user.
+  """
+  MANNEQUIN
+
+  """
+  Author is a member of the organization that owns the repository.
+  """
+  MEMBER
+
+  """
+  Author has no association with the repository.
+  """
+  NONE
+
+  """
+  Author is the owner of the repository.
+  """
+  OWNER
+}
+
+"""
+The possible errors that will prevent a user from updating a comment.
+"""
+enum CommentCannotUpdateReason {
+  """
+  Unable to create a comment because the repository is archived.
+  """
+  ARCHIVED
+
+  """
+  You cannot update this comment.
+  """
+  DENIED
+
+  """
+  You must be the author or have write access to this repository to update this comment.
+  """
+  INSUFFICIENT_ACCESS
+
+  """
+  Unable to create a comment because the issue is locked.
+  """
+  LOCKED
+
+  """
+  You must be logged in to update this comment.
+  """
+  LOGIN_REQUIRED
+
+  """
+  Repository is under maintenance.
+  """
+  MAINTENANCE
+
+  """
+  At least one email address must be verified to update this comment.
+  """
+  VERIFIED_EMAIL_REQUIRED
+}
+
+"""
+Represents a 'comment_deleted' event on a given issue or pull request.
+"""
+type CommentDeletedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The user who authored the deleted comment.
+  """
+  deletedCommentAuthor: Actor
+
+  """
+  The Node ID of the CommentDeletedEvent object
+  """
+  id: ID!
+}
+
+"""
+Represents a Git commit.
+"""
+type Commit implements GitObject & Node & Subscribable & UniformResourceLocatable {
+  """
+  An abbreviated version of the Git object ID
+  """
+  abbreviatedOid: String!
+
+  """
+  The number of additions in this commit.
+  """
+  additions: Int!
+
+  """
+  The merged Pull Request that introduced the commit to the repository. If the
+  commit is not present in the default branch, additionally returns open Pull
+  Requests associated with the commit.
+  """
+  associatedPullRequests(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for pull requests.
+    """
+    orderBy: PullRequestOrder = {field: CREATED_AT, direction: ASC}
+  ): PullRequestConnection
+
+  """
+  Authorship details of the commit.
+  """
+  author: GitActor
+
+  """
+  Check if the committer and the author match.
+  """
+  authoredByCommitter: Boolean!
+
+  """
+  The datetime when this commit was authored.
+  """
+  authoredDate: DateTime!
+
+  """
+  The list of authors for this commit based on the git author and the Co-authored-by
+  message trailer. The git author will always be first.
+  """
+  authors(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): GitActorConnection!
+
+  """
+  Fetches `git blame` information.
+  """
+  blame(
+    """
+    The file whose Git blame information you want.
+    """
+    path: String!
+  ): Blame!
+
+  """
+  We recommend using the `changedFilesIfAvailable` field instead of
+  `changedFiles`, as `changedFiles` will cause your request to return an error
+  if GitHub is unable to calculate the number of changed files.
+  """
+  changedFiles: Int!
+    @deprecated(
+      reason: "`changedFiles` will be removed. Use `changedFilesIfAvailable` instead. Removal on 2023-01-01 UTC."
+    )
+
+  """
+  The number of changed files in this commit. If GitHub is unable to calculate
+  the number of changed files (for example due to a timeout), this will return
+  `null`. We recommend using this field instead of `changedFiles`.
+  """
+  changedFilesIfAvailable: Int
+
+  """
+  The check suites associated with a commit.
+  """
+  checkSuites(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Filters the check suites by this type.
+    """
+    filterBy: CheckSuiteFilter
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): CheckSuiteConnection
+
+  """
+  Comments made on the commit.
+  """
+  comments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): CommitCommentConnection!
+
+  """
+  The HTTP path for this Git object
+  """
+  commitResourcePath: URI!
+
+  """
+  The HTTP URL for this Git object
+  """
+  commitUrl: URI!
+
+  """
+  The datetime when this commit was committed.
+  """
+  committedDate: DateTime!
+
+  """
+  Check if committed via GitHub web UI.
+  """
+  committedViaWeb: Boolean!
+
+  """
+  Committer details of the commit.
+  """
+  committer: GitActor
+
+  """
+  The number of deletions in this commit.
+  """
+  deletions: Int!
+
+  """
+  The deployments associated with a commit.
+  """
+  deployments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Environments to list deployments for.
+    """
+    environments: [String!]
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for deployments returned from the connection.
+    """
+    orderBy: DeploymentOrder = {field: CREATED_AT, direction: ASC}
+  ): DeploymentConnection
+
+  """
+  The tree entry representing the file located at the given path.
+  """
+  file(
+    """
+    The path for the file
+    """
+    path: String!
+  ): TreeEntry
+
+  """
+  The linear commit history starting from (and including) this commit, in the same order as `git log`.
+  """
+  history(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    If non-null, filters history to only show commits with matching authorship.
+    """
+    author: CommitAuthor
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    If non-null, filters history to only show commits touching files under this path.
+    """
+    path: String
+
+    """
+    Allows specifying a beginning time or date for fetching commits.
+    """
+    since: GitTimestamp
+
+    """
+    Allows specifying an ending time or date for fetching commits.
+    """
+    until: GitTimestamp
+  ): CommitHistoryConnection!
+
+  """
+  The Node ID of the Commit object
+  """
+  id: ID!
+
+  """
+  The Git commit message
+  """
+  message: String!
+
+  """
+  The Git commit message body
+  """
+  messageBody: String!
+
+  """
+  The commit message body rendered to HTML.
+  """
+  messageBodyHTML: HTML!
+
+  """
+  The Git commit message headline
+  """
+  messageHeadline: String!
+
+  """
+  The commit message headline rendered to HTML.
+  """
+  messageHeadlineHTML: HTML!
+
+  """
+  The Git object ID
+  """
+  oid: GitObjectID!
+
+  """
+  The organization this commit was made on behalf of.
+  """
+  onBehalfOf: Organization
+
+  """
+  The parents of a commit.
+  """
+  parents(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): CommitConnection!
+
+  """
+  The datetime when this commit was pushed.
+  """
+  pushedDate: DateTime @deprecated(reason: "`pushedDate` is no longer supported. Removal on 2023-07-01 UTC.")
+
+  """
+  The Repository this commit belongs to
+  """
+  repository: Repository!
+
+  """
+  The HTTP path for this commit
+  """
+  resourcePath: URI!
+
+  """
+  Commit signing information, if present.
+  """
+  signature: GitSignature
+
+  """
+  Status information for this commit
+  """
+  status: Status
+
+  """
+  Check and Status rollup information for this commit.
+  """
+  statusCheckRollup: StatusCheckRollup
+
+  """
+  Returns a list of all submodules in this repository as of this commit, parsed from the .gitmodules file.
+  """
+  submodules(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): SubmoduleConnection!
+
+  """
+  Returns a URL to download a tarball archive for a repository.
+  Note: For private repositories, these links are temporary and expire after five minutes.
+  """
+  tarballUrl: URI!
+
+  """
+  Commit's root Tree
+  """
+  tree: Tree!
+
+  """
+  The HTTP path for the tree of this commit
+  """
+  treeResourcePath: URI!
+
+  """
+  The HTTP URL for the tree of this commit
+  """
+  treeUrl: URI!
+
+  """
+  The HTTP URL for this commit
+  """
+  url: URI!
+
+  """
+  Check if the viewer is able to change their subscription status for the repository.
+  """
+  viewerCanSubscribe: Boolean!
+
+  """
+  Identifies if the viewer is watching, not watching, or ignoring the subscribable entity.
+  """
+  viewerSubscription: SubscriptionState
+
+  """
+  Returns a URL to download a zipball archive for a repository.
+  Note: For private repositories, these links are temporary and expire after five minutes.
+  """
+  zipballUrl: URI!
+}
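+
+# Illustrative query: walking a branch's history for commits touching a
+# path. Assumes `Repository.object(expression:)` and the usual `PageInfo`
+# cursor fields defined elsewhere in this schema.
+#
+#   query {
+#     repository(owner: "octocat", name: "hello-world") {
+#       object(expression: "main") {
+#         ... on Commit {
+#           history(first: 20, path: "docs", since: "2024-01-01T00:00:00Z") {
+#             nodes { abbreviatedOid messageHeadline authoredDate }
+#             pageInfo { hasNextPage endCursor }
+#           }
+#         }
+#       }
+#     }
+#   }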
+
+"""
+Specifies an author for filtering Git commits.
+"""
+input CommitAuthor {
+  """
+  Email addresses to filter by. Commits authored by any of the specified email addresses will be returned.
+  """
+  emails: [String!]
+
+  """
+  ID of a User to filter by. If non-null, only commits authored by this user
+  will be returned. This field takes precedence over emails.
+  """
+  id: ID
+}
+
+"""
+Parameters to be used for the commit_author_email_pattern rule
+"""
+type CommitAuthorEmailPatternParameters {
+  """
+  How this rule will appear to users.
+  """
+  name: String
+
+  """
+  If true, the rule will fail if the pattern matches.
+  """
+  negate: Boolean!
+
+  """
+  The operator to use for matching.
+  """
+  operator: String!
+
+  """
+  The pattern to match with.
+  """
+  pattern: String!
+}
+
+"""
+Parameters to be used for the commit_author_email_pattern rule
+"""
+input CommitAuthorEmailPatternParametersInput {
+  """
+  How this rule will appear to users.
+  """
+  name: String
+
+  """
+  If true, the rule will fail if the pattern matches.
+  """
+  negate: Boolean
+
+  """
+  The operator to use for matching.
+  """
+  operator: String!
+
+  """
+  The pattern to match with.
+  """
+  pattern: String!
+}
+
+"""
+Represents a comment on a given Commit.
+"""
+type CommitComment implements Comment & Deletable & Minimizable & Node & Reactable & RepositoryNode & Updatable & UpdatableComment {
+  """
+  The actor who authored the comment.
+  """
+  author: Actor
+
+  """
+  Author's association with the subject of the comment.
+  """
+  authorAssociation: CommentAuthorAssociation!
+
+  """
+  Identifies the comment body.
+  """
+  body: String!
+
+  """
+  The body rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  The body rendered to text.
+  """
+  bodyText: String!
+
+  """
+  Identifies the commit associated with the comment, if the commit exists.
+  """
+  commit: Commit
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Check if this comment was created via an email reply.
+  """
+  createdViaEmail: Boolean!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The actor who edited the comment.
+  """
+  editor: Actor
+
+  """
+  The Node ID of the CommitComment object
+  """
+  id: ID!
+
+  """
+  Check if this comment was edited and includes an edit with the creation data.
+  """
+  includesCreatedEdit: Boolean!
+
+  """
+  Returns whether or not a comment has been minimized.
+  """
+  isMinimized: Boolean!
+
+  """
+  The moment the editor made the last edit.
+  """
+  lastEditedAt: DateTime
+
+  """
+  Returns why the comment was minimized. One of `abuse`, `off-topic`,
+  `outdated`, `resolved`, `duplicate`, and `spam`. Note that the case and
+  formatting of these values differs from the inputs to the `MinimizeComment` mutation.
+  """
+  minimizedReason: String
+
+  """
+  Identifies the file path associated with the comment.
+  """
+  path: String
+
+  """
+  Identifies the line position associated with the comment.
+  """
+  position: Int
+
+  """
+  Identifies when the comment was published.
+  """
+  publishedAt: DateTime
+
+  """
+  A list of reactions grouped by content left on the subject.
+  """
+  reactionGroups: [ReactionGroup!]
+
+  """
+  A list of Reactions left on the Issue.
+  """
+  reactions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Allows filtering Reactions by emoji.
+    """
+    content: ReactionContent
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Allows specifying the order in which reactions are returned.
+    """
+    orderBy: ReactionOrder
+  ): ReactionConnection!
+
+  """
+  The repository associated with this node.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path permalink for this commit comment.
+  """
+  resourcePath: URI!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL permalink for this commit comment.
+  """
+  url: URI!
+
+  """
+  A list of edits to this content.
+  """
+  userContentEdits(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserContentEditConnection
+
+  """
+  Check if the current viewer can delete this object.
+  """
+  viewerCanDelete: Boolean!
+
+  """
+  Check if the current viewer can minimize this object.
+  """
+  viewerCanMinimize: Boolean!
+
+  """
+  Can the user react to this subject.
+  """
+  viewerCanReact: Boolean!
+
+  """
+  Check if the current viewer can update this object.
+  """
+  viewerCanUpdate: Boolean!
+
+  """
+  Reasons why the current viewer can not update this comment.
+  """
+  viewerCannotUpdateReasons: [CommentCannotUpdateReason!]!
+
+  """
+  Did the viewer author this comment.
+  """
+  viewerDidAuthor: Boolean!
+}
+
+"""
+The connection type for CommitComment.
+"""
+type CommitCommentConnection {
+  """
+  A list of edges.
+  """
+  edges: [CommitCommentEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [CommitComment]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type CommitCommentEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: CommitComment
+}
+
+"""
+A thread of comments on a commit.
+"""
+type CommitCommentThread implements Node & RepositoryNode {
+  """
+  The comments that exist in this thread.
+  """
+  comments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): CommitCommentConnection!
+
+  """
+  The commit the comments were made on.
+  """
+  commit: Commit
+
+  """
+  The Node ID of the CommitCommentThread object
+  """
+  id: ID!
+
+  """
+  The file the comments were made on.
+  """
+  path: String
+
+  """
+  The position in the diff for the commit that the comment was made on.
+  """
+  position: Int
+
+  """
+  The repository associated with this node.
+  """
+  repository: Repository!
+}
+
+"""
+The connection type for Commit.
+"""
+type CommitConnection {
+  """
+  A list of edges.
+  """
+  edges: [CommitEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Commit]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+Ordering options for commit contribution connections.
+"""
+input CommitContributionOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field by which to order commit contributions.
+  """
+  field: CommitContributionOrderField!
+}
+
+"""
+Properties by which commit contribution connections can be ordered.
+"""
+enum CommitContributionOrderField {
+  """
+  Order commit contributions by how many commits they represent.
+  """
+  COMMIT_COUNT
+
+  """
+  Order commit contributions by when they were made.
+  """
+  OCCURRED_AT
+}
+
+"""
+This aggregates commits made by a user within one repository.
+"""
+type CommitContributionsByRepository {
+  """
+  The commit contributions, each representing a day.
+  """
+  contributions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for commit contributions returned from the connection.
+    """
+    orderBy: CommitContributionOrder = {field: OCCURRED_AT, direction: DESC}
+  ): CreatedCommitContributionConnection!
+
+  """
+  The repository in which the commits were made.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path for the user's commits to the repository in this time range.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for the user's commits to the repository in this time range.
+  """
+  url: URI!
+}
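+
+# Illustrative query: reaching this aggregate via a user's contributions
+# collection. Assumes `User.contributionsCollection`, its
+# `commitContributionsByRepository` field, and the `occurredAt` /
+# `commitCount` fields on the contribution nodes, all defined elsewhere in
+# this schema.
+#
+#   query {
+#     user(login: "octocat") {
+#       contributionsCollection {
+#         commitContributionsByRepository(maxRepositories: 5) {
+#           repository { nameWithOwner }
+#           contributions(first: 10) {
+#             nodes { occurredAt commitCount }
+#           }
+#         }
+#       }
+#     }
+#   }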
+
+"""
+An edge in a connection.
+"""
+type CommitEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Commit
+}
+
+"""
+The connection type for Commit.
+"""
+type CommitHistoryConnection {
+  """
+  A list of edges.
+  """
+  edges: [CommitEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Commit]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+A message to include with a new commit.
+"""
+input CommitMessage {
+  """
+  The body of the message.
+  """
+  body: String
+
+  """
+  The headline of the message.
+  """
+  headline: String!
+}
+
+"""
+Parameters to be used for the commit_message_pattern rule
+"""
+type CommitMessagePatternParameters {
+  """
+  How this rule will appear to users.
+  """
+  name: String
+
+  """
+  If true, the rule will fail if the pattern matches.
+  """
+  negate: Boolean!
+
+  """
+  The operator to use for matching.
+  """
+  operator: String!
+
+  """
+  The pattern to match with.
+  """
+  pattern: String!
+}
+
+"""
+Parameters to be used for the commit_message_pattern rule
+"""
+input CommitMessagePatternParametersInput {
+  """
+  How this rule will appear to users.
+  """
+  name: String
+
+  """
+  If true, the rule will fail if the pattern matches.
+  """
+  negate: Boolean
+
+  """
+  The operator to use for matching.
+  """
+  operator: String!
+
+  """
+  The pattern to match with.
+  """
+  pattern: String!
+}
+
+"""
+A git ref for a commit to be appended to.
+
+The ref must be a branch, i.e. its fully qualified name must start
+with `refs/heads/` (although the input is not required to be fully
+qualified).
+
+The Ref may be specified by its global node ID or by the
+`repositoryNameWithOwner` and `branchName`.
+
+### Examples
+
+Specify a branch using a global node ID:
+
+    { "id": "MDM6UmVmMTpyZWZzL2hlYWRzL21haW4=" }
+
+Specify a branch using `repositoryNameWithOwner` and `branchName`:
+
+    {
+      "repositoryNameWithOwner": "github/graphql-client",
+      "branchName": "main"
+    }
+"""
+input CommittableBranch {
+  """
+  The unqualified name of the branch to append the commit to.
+  """
+  branchName: String
+
+  """
+  The Node ID of the Ref to be updated.
+  """
+  id: ID
+
+  """
+  The nameWithOwner of the repository to commit to.
+  """
+  repositoryNameWithOwner: String
+}
+
+"""
+Parameters to be used for the committer_email_pattern rule
+"""
+type CommitterEmailPatternParameters {
+  """
+  How this rule will appear to users.
+  """
+  name: String
+
+  """
+  If true, the rule will fail if the pattern matches.
+  """
+  negate: Boolean!
+
+  """
+  The operator to use for matching.
+  """
+  operator: String!
+
+  """
+  The pattern to match with.
+  """
+  pattern: String!
+}
+
+"""
+Parameters to be used for the committer_email_pattern rule
+"""
+input CommitterEmailPatternParametersInput {
+  """
+  How this rule will appear to users.
+  """
+  name: String
+
+  """
+  If true, the rule will fail if the pattern matches.
+  """
+  negate: Boolean
+
+  """
+  The operator to use for matching.
+  """
+  operator: String!
+
+  """
+  The pattern to match with.
+  """
+  pattern: String!
+}
+
+"""
+Represents a comparison between two commit revisions.
+"""
+type Comparison implements Node {
+  """
+  The number of commits ahead of the base branch.
+  """
+  aheadBy: Int!
+
+  """
+  The base revision of this comparison.
+  """
+  baseTarget: GitObject!
+
+  """
+  The number of commits behind the base branch.
+  """
+  behindBy: Int!
+
+  """
+  The commits which compose this comparison.
+  """
+  commits(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ComparisonCommitConnection!
+
+  """
+  The head revision of this comparison.
+  """
+  headTarget: GitObject!
+
+  """
+  The Node ID of the Comparison object
+  """
+  id: ID!
+
+  """
+  The status of this comparison.
+  """
+  status: ComparisonStatus!
+}
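+
+# A hypothetical query sketch (comment only): comparing two revisions via the
+# `Comparison` type above, assuming `Ref` exposes a `compare(headRef:)` field
+# elsewhere in the schema. Owner, name, and ref names are placeholders.
+#
+#   query {
+#     repository(owner: "octocat", name: "hello-world") {
+#       ref(qualifiedName: "refs/heads/main") {
+#         compare(headRef: "feature") {
+#           aheadBy
+#           behindBy
+#           status
+#           commits(first: 5) {
+#             totalCount
+#             authorCount
+#           }
+#         }
+#       }
+#     }
+#   }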
+
+"""
+The connection type for Commit.
+"""
+type ComparisonCommitConnection {
+  """
+  The total count of authors and co-authors across all commits.
+  """
+  authorCount: Int!
+
+  """
+  A list of edges.
+  """
+  edges: [CommitEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Commit]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+The status of a git comparison between two refs.
+"""
+enum ComparisonStatus {
+  """
+  The head ref is ahead of the base ref.
+  """
+  AHEAD
+
+  """
+  The head ref is behind the base ref.
+  """
+  BEHIND
+
+  """
+  The head ref is both ahead of and behind the base ref, indicating that git history has diverged.
+  """
+  DIVERGED
+
+  """
+  The head ref and base ref are identical.
+  """
+  IDENTICAL
+}
+
+"""
+Represents a 'connected' event on a given issue or pull request.
+"""
+type ConnectedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the ConnectedEvent object
+  """
+  id: ID!
+
+  """
+  Reference originated in a different repository.
+  """
+  isCrossRepository: Boolean!
+
+  """
+  Issue or pull request that made the reference.
+  """
+  source: ReferencedSubject!
+
+  """
+  Issue or pull request which was connected.
+  """
+  subject: ReferencedSubject!
+}
+
+"""
+The Contributing Guidelines for a repository.
+"""
+type ContributingGuidelines {
+  """
+  The body of the Contributing Guidelines.
+  """
+  body: String
+
+  """
+  The HTTP path for the Contributing Guidelines.
+  """
+  resourcePath: URI
+
+  """
+  The HTTP URL for the Contributing Guidelines.
+  """
+  url: URI
+}
+
+"""
+Represents a contribution a user made on GitHub, such as opening an issue.
+"""
+interface Contribution {
+  """
+  Whether this contribution is associated with a record you do not have access to. For
+  example, your own 'first issue' contribution may have been made on a repository you can no
+  longer access.
+  """
+  isRestricted: Boolean!
+
+  """
+  When this contribution was made.
+  """
+  occurredAt: DateTime!
+
+  """
+  The HTTP path for this contribution.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for this contribution.
+  """
+  url: URI!
+
+  """
+  The user who made this contribution.
+  """
+  user: User!
+}
+
+"""
+A calendar of contributions made on GitHub by a user.
+"""
+type ContributionCalendar {
+  """
+  A list of hex color codes used in this calendar. The darker the color, the more contributions it represents.
+  """
+  colors: [String!]!
+
+  """
+  Determine if the color set was chosen because it's currently Halloween.
+  """
+  isHalloween: Boolean!
+
+  """
+  A list of the months of contributions in this calendar.
+  """
+  months: [ContributionCalendarMonth!]!
+
+  """
+  The count of total contributions in the calendar.
+  """
+  totalContributions: Int!
+
+  """
+  A list of the weeks of contributions in this calendar.
+  """
+  weeks: [ContributionCalendarWeek!]!
+}
+
+"""
+Represents a single day of contributions on GitHub by a user.
+"""
+type ContributionCalendarDay {
+  """
+  The hex color code that represents how many contributions were made on this day compared to others in the calendar.
+  """
+  color: String!
+
+  """
+  How many contributions were made by the user on this day.
+  """
+  contributionCount: Int!
+
+  """
+  Indication of contributions, relative to other days. Can be used to indicate
+  which color to represent this day on a calendar.
+  """
+  contributionLevel: ContributionLevel!
+
+  """
+  The day this square represents.
+  """
+  date: Date!
+
+  """
+  A number representing which day of the week this square represents, e.g., 1 is Monday.
+  """
+  weekday: Int!
+}
+
+"""
+A month of contributions in a user's contribution graph.
+"""
+type ContributionCalendarMonth {
+  """
+  The date of the first day of this month.
+  """
+  firstDay: Date!
+
+  """
+  The name of the month.
+  """
+  name: String!
+
+  """
+  How many weeks started in this month.
+  """
+  totalWeeks: Int!
+
+  """
+  The year the month occurred in.
+  """
+  year: Int!
+}
+
+"""
+A week of contributions in a user's contribution graph.
+"""
+type ContributionCalendarWeek {
+  """
+  The days of contributions in this week.
+  """
+  contributionDays: [ContributionCalendarDay!]!
+
+  """
+  The date of the earliest square in this week.
+  """
+  firstDay: Date!
+}
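+
+# A hypothetical query sketch (comment only): walking the calendar types above
+# from weeks down to individual days. `user` and `contributionsCollection` are
+# assumed from elsewhere in the schema; the login is a placeholder.
+#
+#   query {
+#     user(login: "octocat") {
+#       contributionsCollection {
+#         contributionCalendar {
+#           totalContributions
+#           weeks {
+#             firstDay
+#             contributionDays { date contributionCount contributionLevel }
+#           }
+#         }
+#       }
+#     }
+#   }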
+
+"""
+Varying levels of contributions from none to many.
+"""
+enum ContributionLevel {
+  """
+  Lowest 25% of days of contributions.
+  """
+  FIRST_QUARTILE
+
+  """
+  Highest 25% of days of contributions. More contributions than the third quartile.
+  """
+  FOURTH_QUARTILE
+
+  """
+  No contributions occurred.
+  """
+  NONE
+
+  """
+  Second lowest 25% of days of contributions. More contributions than the first quartile.
+  """
+  SECOND_QUARTILE
+
+  """
+  Second highest 25% of days of contributions. More contributions than the second quartile, fewer than the fourth quartile.
+  """
+  THIRD_QUARTILE
+}
+
+"""
+Ordering options for contribution connections.
+"""
+input ContributionOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+}
+
+"""
+A contributions collection aggregates contributions such as opened issues and commits created by a user.
+"""
+type ContributionsCollection {
+  """
+  Commit contributions made by the user, grouped by repository.
+  """
+  commitContributionsByRepository(
+    """
+    How many repositories should be included.
+    """
+    maxRepositories: Int = 25
+  ): [CommitContributionsByRepository!]!
+
+  """
+  A calendar of this user's contributions on GitHub.
+  """
+  contributionCalendar: ContributionCalendar!
+
+  """
+  The years the user has been making contributions, with the most recent year first.
+  """
+  contributionYears: [Int!]!
+
+  """
+  Determine if this collection's time span ends in the current month.
+  """
+  doesEndInCurrentMonth: Boolean!
+
+  """
+  The date of the first restricted contribution the user made in this time
+  period. Can only be non-null when the user has enabled private contribution counts.
+  """
+  earliestRestrictedContributionDate: Date
+
+  """
+  The ending date and time of this collection.
+  """
+  endedAt: DateTime!
+
+  """
+  The first issue the user opened on GitHub. This will be null if that issue was
+  opened outside the collection's time range and ignoreTimeRange is false. If
+  the issue is not visible but the user has opted to show private contributions,
+  a RestrictedContribution will be returned.
+  """
+  firstIssueContribution: CreatedIssueOrRestrictedContribution
+
+  """
+  The first pull request the user opened on GitHub. This will be null if that
+  pull request was opened outside the collection's time range and
+  ignoreTimeRange is not true. If the pull request is not visible but the user
+  has opted to show private contributions, a RestrictedContribution will be returned.
+  """
+  firstPullRequestContribution: CreatedPullRequestOrRestrictedContribution
+
+  """
+  The first repository the user created on GitHub. This will be null if that
+  first repository was created outside the collection's time range and
+  ignoreTimeRange is false. If the repository is not visible, then a
+  RestrictedContribution is returned.
+  """
+  firstRepositoryContribution: CreatedRepositoryOrRestrictedContribution
+
+  """
+  Does the user have any more activity in the timeline that occurred prior to the collection's time range?
+  """
+  hasActivityInThePast: Boolean!
+
+  """
+  Determine if there are any contributions in this collection.
+  """
+  hasAnyContributions: Boolean!
+
+  """
+  Determine if the user made any contributions in this time frame whose details
+  are not visible because they were made in a private repository. Can only be
+  true if the user enabled private contribution counts.
+  """
+  hasAnyRestrictedContributions: Boolean!
+
+  """
+  Whether or not the collection's time span is all within the same day.
+  """
+  isSingleDay: Boolean!
+
+  """
+  A list of issues the user opened.
+  """
+  issueContributions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Should the user's first issue ever be excluded from the result.
+    """
+    excludeFirst: Boolean = false
+
+    """
+    Should the user's most commented issue be excluded from the result.
+    """
+    excludePopular: Boolean = false
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for contributions returned from the connection.
+    """
+    orderBy: ContributionOrder = {direction: DESC}
+  ): CreatedIssueContributionConnection!
+
+  """
+  Issue contributions made by the user, grouped by repository.
+  """
+  issueContributionsByRepository(
+    """
+    Should the user's first issue ever be excluded from the result.
+    """
+    excludeFirst: Boolean = false
+
+    """
+    Should the user's most commented issue be excluded from the result.
+    """
+    excludePopular: Boolean = false
+
+    """
+    How many repositories should be included.
+    """
+    maxRepositories: Int = 25
+  ): [IssueContributionsByRepository!]!
+
+  """
+  When the user signed up for GitHub. This will be null if that sign up date
+  falls outside the collection's time range and ignoreTimeRange is false.
+  """
+  joinedGitHubContribution: JoinedGitHubContribution
+
+  """
+  The date of the most recent restricted contribution the user made in this time
+  period. Can only be non-null when the user has enabled private contribution counts.
+  """
+  latestRestrictedContributionDate: Date
+
+  """
+  When this collection's time range does not include any activity from the user, use this
+  to get a different collection from an earlier time range that does have activity.
+  """
+  mostRecentCollectionWithActivity: ContributionsCollection
+
+  """
+  Returns a different contributions collection from an earlier time range than this one
+  that does not have any contributions.
+  """
+  mostRecentCollectionWithoutActivity: ContributionsCollection
+
+  """
+  The issue the user opened on GitHub that received the most comments in the specified
+  time frame.
+  """
+  popularIssueContribution: CreatedIssueContribution
+
+  """
+  The pull request the user opened on GitHub that received the most comments in the
+  specified time frame.
+  """
+  popularPullRequestContribution: CreatedPullRequestContribution
+
+  """
+  Pull request contributions made by the user.
+  """
+  pullRequestContributions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Should the user's first pull request ever be excluded from the result.
+    """
+    excludeFirst: Boolean = false
+
+    """
+    Should the user's most commented pull request be excluded from the result.
+    """
+    excludePopular: Boolean = false
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for contributions returned from the connection.
+    """
+    orderBy: ContributionOrder = {direction: DESC}
+  ): CreatedPullRequestContributionConnection!
+
+  """
+  Pull request contributions made by the user, grouped by repository.
+  """
+  pullRequestContributionsByRepository(
+    """
+    Should the user's first pull request ever be excluded from the result.
+    """
+    excludeFirst: Boolean = false
+
+    """
+    Should the user's most commented pull request be excluded from the result.
+    """
+    excludePopular: Boolean = false
+
+    """
+    How many repositories should be included.
+    """
+    maxRepositories: Int = 25
+  ): [PullRequestContributionsByRepository!]!
+
+  """
+  Pull request review contributions made by the user. Returns the most recently
+  submitted review for each PR reviewed by the user.
+  """
+  pullRequestReviewContributions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for contributions returned from the connection.
+    """
+    orderBy: ContributionOrder = {direction: DESC}
+  ): CreatedPullRequestReviewContributionConnection!
+
+  """
+  Pull request review contributions made by the user, grouped by repository.
+  """
+  pullRequestReviewContributionsByRepository(
+    """
+    How many repositories should be included.
+    """
+    maxRepositories: Int = 25
+  ): [PullRequestReviewContributionsByRepository!]!
+
+  """
+  A list of repositories owned by the user that the user created in this time range.
+  """
+  repositoryContributions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Should the user's first repository ever be excluded from the result.
+    """
+    excludeFirst: Boolean = false
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for contributions returned from the connection.
+    """
+    orderBy: ContributionOrder = {direction: DESC}
+  ): CreatedRepositoryContributionConnection!
+
+  """
+  A count of contributions made by the user that the viewer cannot access. Only
+  non-zero when the user has chosen to share their private contribution counts.
+  """
+  restrictedContributionsCount: Int!
+
+  """
+  The beginning date and time of this collection.
+  """
+  startedAt: DateTime!
+
+  """
+  How many commits were made by the user in this time span.
+  """
+  totalCommitContributions: Int!
+
+  """
+  How many issues the user opened.
+  """
+  totalIssueContributions(
+    """
+    Should the user's first issue ever be excluded from this count.
+    """
+    excludeFirst: Boolean = false
+
+    """
+    Should the user's most commented issue be excluded from this count.
+    """
+    excludePopular: Boolean = false
+  ): Int!
+
+  """
+  How many pull requests the user opened.
+  """
+  totalPullRequestContributions(
+    """
+    Should the user's first pull request ever be excluded from this count.
+    """
+    excludeFirst: Boolean = false
+
+    """
+    Should the user's most commented pull request be excluded from this count.
+    """
+    excludePopular: Boolean = false
+  ): Int!
+
+  """
+  How many pull request reviews the user left.
+  """
+  totalPullRequestReviewContributions: Int!
+
+  """
+  How many different repositories the user committed to.
+  """
+  totalRepositoriesWithContributedCommits: Int!
+
+  """
+  How many different repositories the user opened issues in.
+  """
+  totalRepositoriesWithContributedIssues(
+    """
+    Should the user's first issue ever be excluded from this count.
+    """
+    excludeFirst: Boolean = false
+
+    """
+    Should the user's most commented issue be excluded from this count.
+    """
+    excludePopular: Boolean = false
+  ): Int!
+
+  """
+  How many different repositories the user left pull request reviews in.
+  """
+  totalRepositoriesWithContributedPullRequestReviews: Int!
+
+  """
+  How many different repositories the user opened pull requests in.
+  """
+  totalRepositoriesWithContributedPullRequests(
+    """
+    Should the user's first pull request ever be excluded from this count.
+    """
+    excludeFirst: Boolean = false
+
+    """
+    Should the user's most commented pull request be excluded from this count.
+    """
+    excludePopular: Boolean = false
+  ): Int!
+
+  """
+  How many repositories the user created.
+  """
+  totalRepositoryContributions(
+    """
+    Should the user's first repository ever be excluded from this count.
+    """
+    excludeFirst: Boolean = false
+  ): Int!
+
+  """
+  The user who made the contributions in this collection.
+  """
+  user: User!
+}
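+
+# A hypothetical query sketch (comment only) against the collection above. The
+# `from`/`to` arguments and the fields of `CreatedIssueContribution` are
+# assumed from their definitions elsewhere in the schema; the login and dates
+# are placeholders.
+#
+#   query {
+#     user(login: "octocat") {
+#       contributionsCollection(from: "2024-01-01T00:00:00Z", to: "2024-12-31T23:59:59Z") {
+#         totalCommitContributions
+#         totalIssueContributions(excludeFirst: true)
+#         hasAnyRestrictedContributions
+#         issueContributions(first: 5, orderBy: {direction: DESC}) {
+#           nodes { occurredAt issue { title url } }
+#         }
+#       }
+#     }
+#   }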
+
+"""
+Autogenerated input type of ConvertProjectCardNoteToIssue
+"""
+input ConvertProjectCardNoteToIssueInput {
+  """
+  The body of the newly created issue.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ProjectCard ID to convert.
+  """
+  projectCardId: ID! @possibleTypes(concreteTypes: ["ProjectCard"])
+
+  """
+  The ID of the repository to create the issue in.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  The title of the newly created issue. Defaults to the card's note text.
+  """
+  title: String
+}
+
+"""
+Autogenerated return type of ConvertProjectCardNoteToIssue
+"""
+type ConvertProjectCardNoteToIssuePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated ProjectCard.
+  """
+  projectCard: ProjectCard
+}
+
+"""
+Autogenerated input type of ConvertPullRequestToDraft
+"""
+input ConvertPullRequestToDraftInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the pull request to convert to draft
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+}
+
+"""
+Autogenerated return type of ConvertPullRequestToDraft
+"""
+type ConvertPullRequestToDraftPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The pull request that is now a draft.
+  """
+  pullRequest: PullRequest
+}
+
+"""
+Represents a 'convert_to_draft' event on a given pull request.
+"""
+type ConvertToDraftEvent implements Node & UniformResourceLocatable {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the ConvertToDraftEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+
+  """
+  The HTTP path for this convert to draft event.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for this convert to draft event.
+  """
+  url: URI!
+}
+
+"""
+Represents a 'converted_note_to_issue' event on a given issue or pull request.
+"""
+type ConvertedNoteToIssueEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the ConvertedNoteToIssueEvent object
+  """
+  id: ID!
+
+  """
+  Project referenced by event.
+  """
+  project: Project @preview(toggledBy: "starfox-preview")
+
+  """
+  Project card referenced by this project event.
+  """
+  projectCard: ProjectCard @preview(toggledBy: "starfox-preview")
+
+  """
+  Column name referenced by this project event.
+  """
+  projectColumnName: String! @preview(toggledBy: "starfox-preview")
+}
+
+"""
+Represents a 'converted_to_discussion' event on a given issue.
+"""
+type ConvertedToDiscussionEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The discussion that the issue was converted into.
+  """
+  discussion: Discussion
+
+  """
+  The Node ID of the ConvertedToDiscussionEvent object
+  """
+  id: ID!
+}
+
+"""
+Autogenerated input type of CopyProjectV2
+"""
+input CopyProjectV2Input {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Include draft issues in the new project
+  """
+  includeDraftIssues: Boolean = false
+
+  """
+  The owner ID of the new project.
+  """
+  ownerId: ID! @possibleTypes(concreteTypes: ["Organization", "User"], abstractType: "OrganizationOrUser")
+
+  """
+  The ID of the source Project to copy.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+
+  """
+  The title of the project.
+  """
+  title: String!
+}
+
+"""
+Autogenerated return type of CopyProjectV2
+"""
+type CopyProjectV2Payload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The copied project.
+  """
+  projectV2: ProjectV2
+}
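+
+# A hypothetical mutation sketch (comment only), assuming the conventional
+# `copyProjectV2` mutation field that pairs these input/payload types. All IDs
+# and the title are placeholders.
+#
+#   mutation {
+#     copyProjectV2(input: {
+#       projectId: "PVT_kwExample"
+#       ownerId: "O_kgExample"
+#       title: "Roadmap (copy)"
+#       includeDraftIssues: true
+#     }) {
+#       projectV2 { id title }
+#     }
+#   }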
+
+"""
+Autogenerated input type of CreateAttributionInvitation
+"""
+input CreateAttributionInvitationInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the owner scoping the reattributable data.
+  """
+  ownerId: ID! @possibleTypes(concreteTypes: ["Bot", "Enterprise", "Mannequin", "Organization", "User"])
+
+  """
+  The Node ID of the account owning the data to reattribute.
+  """
+  sourceId: ID! @possibleTypes(concreteTypes: ["Bot", "Enterprise", "Mannequin", "Organization", "User"])
+
+  """
+  The Node ID of the account which may claim the data.
+  """
+  targetId: ID! @possibleTypes(concreteTypes: ["Bot", "Enterprise", "Mannequin", "Organization", "User"])
+}
+
+"""
+Autogenerated return type of CreateAttributionInvitation
+"""
+type CreateAttributionInvitationPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The owner scoping the reattributable data.
+  """
+  owner: Organization
+
+  """
+  The account owning the data to reattribute.
+  """
+  source: Claimable
+
+  """
+  The account which may claim the data.
+  """
+  target: Claimable
+}
+
+"""
+Autogenerated input type of CreateBranchProtectionRule
+"""
+input CreateBranchProtectionRuleInput {
+  """
+  Can this branch be deleted.
+  """
+  allowsDeletions: Boolean
+
+  """
+  Are force pushes allowed on this branch.
+  """
+  allowsForcePushes: Boolean
+
+  """
+  Is branch creation a protected operation.
+  """
+  blocksCreations: Boolean
+
+  """
+  A list of User, Team, or App IDs allowed to bypass force push targeting matching branches.
+  """
+  bypassForcePushActorIds: [ID!]
+
+  """
+  A list of User, Team, or App IDs allowed to bypass pull requests targeting matching branches.
+  """
+  bypassPullRequestActorIds: [ID!]
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Will new commits pushed to matching branches dismiss pull request review approvals.
+  """
+  dismissesStaleReviews: Boolean
+
+  """
+  Can admins overwrite branch protection.
+  """
+  isAdminEnforced: Boolean
+
+  """
+  Whether users can pull changes from upstream when the branch is locked. Set to
+  `true` to allow fork syncing. Set to `false` to prevent fork syncing.
+  """
+  lockAllowsFetchAndMerge: Boolean
+
+  """
+  Whether to set the branch as read-only. If this is true, users will not be able to push to the branch.
+  """
+  lockBranch: Boolean
+
+  """
+  The glob-like pattern used to determine matching branches.
+  """
+  pattern: String!
+
+  """
+  A list of User, Team, or App IDs allowed to push to matching branches.
+  """
+  pushActorIds: [ID!]
+
+  """
+  The global relay id of the repository in which a new branch protection rule should be created.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  Whether the most recent push must be approved by someone other than the person who pushed it
+  """
+  requireLastPushApproval: Boolean
+
+  """
+  Number of approving reviews required to update matching branches.
+  """
+  requiredApprovingReviewCount: Int
+
+  """
+  The list of required deployment environments
+  """
+  requiredDeploymentEnvironments: [String!]
+
+  """
+  List of required status check contexts that must pass for commits to be accepted to matching branches.
+  """
+  requiredStatusCheckContexts: [String!]
+
+  """
+  The list of required status checks
+  """
+  requiredStatusChecks: [RequiredStatusCheckInput!]
+
+  """
+  Are approving reviews required to update matching branches.
+  """
+  requiresApprovingReviews: Boolean
+
+  """
+  Are reviews from code owners required to update matching branches.
+  """
+  requiresCodeOwnerReviews: Boolean
+
+  """
+  Are commits required to be signed.
+  """
+  requiresCommitSignatures: Boolean
+
+  """
+  Are conversations required to be resolved before merging.
+  """
+  requiresConversationResolution: Boolean
+
+  """
+  Are successful deployments required before merging.
+  """
+  requiresDeployments: Boolean
+
+  """
+  Are merge commits prohibited from being pushed to this branch.
+  """
+  requiresLinearHistory: Boolean
+
+  """
+  Are status checks required to update matching branches.
+  """
+  requiresStatusChecks: Boolean
+
+  """
+  Are branches required to be up to date before merging.
+  """
+  requiresStrictStatusChecks: Boolean
+
+  """
+  Is pushing to matching branches restricted.
+  """
+  restrictsPushes: Boolean
+
+  """
+  Is dismissal of pull request reviews restricted.
+  """
+  restrictsReviewDismissals: Boolean
+
+  """
+  A list of User, Team, or App IDs allowed to dismiss reviews on pull requests targeting matching branches.
+  """
+  reviewDismissalActorIds: [ID!]
+}
+
+"""
+Autogenerated return type of CreateBranchProtectionRule
+"""
+type CreateBranchProtectionRulePayload {
+  """
+  The newly created BranchProtectionRule.
+  """
+  branchProtectionRule: BranchProtectionRule
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
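+
+# A hypothetical mutation sketch (comment only), assuming the conventional
+# `createBranchProtectionRule` mutation field for the types above. The
+# repository ID is a placeholder.
+#
+#   mutation {
+#     createBranchProtectionRule(input: {
+#       repositoryId: "R_kgExample"
+#       pattern: "main"
+#       requiresApprovingReviews: true
+#       requiredApprovingReviewCount: 2
+#       dismissesStaleReviews: true
+#     }) {
+#       branchProtectionRule { id pattern }
+#     }
+#   }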
+
+"""
+Autogenerated input type of CreateCheckRun
+"""
+input CreateCheckRunInput {
+  """
+  Possible further actions the integrator can perform, which a user may trigger.
+  """
+  actions: [CheckRunAction!]
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The time that the check run finished.
+  """
+  completedAt: DateTime
+
+  """
+  The final conclusion of the check.
+  """
+  conclusion: CheckConclusionState
+
+  """
+  The URL of the integrator's site that has the full details of the check.
+  """
+  detailsUrl: URI
+
+  """
+  A reference for the run on the integrator's system.
+  """
+  externalId: String
+
+  """
+  The SHA of the head commit.
+  """
+  headSha: GitObjectID!
+
+  """
+  The name of the check.
+  """
+  name: String!
+
+  """
+  Descriptive details about the run.
+  """
+  output: CheckRunOutput
+
+  """
+  The node ID of the repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  The time that the check run began.
+  """
+  startedAt: DateTime
+
+  """
+  The current status.
+  """
+  status: RequestableCheckStatusState
+}
+
+"""
+Autogenerated return type of CreateCheckRun
+"""
+type CreateCheckRunPayload {
+  """
+  The newly created check run.
+  """
+  checkRun: CheckRun
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
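+
+# A hypothetical mutation sketch (comment only), assuming the conventional
+# `createCheckRun` mutation field for the types above. The repository ID and
+# head SHA are placeholders.
+#
+#   mutation {
+#     createCheckRun(input: {
+#       repositoryId: "R_kgExample"
+#       name: "lint"
+#       headSha: "0000000000000000000000000000000000000000"
+#       status: COMPLETED
+#       conclusion: SUCCESS
+#     }) {
+#       checkRun { id name conclusion }
+#     }
+#   }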
+
+"""
+Autogenerated input type of CreateCheckSuite
+"""
+input CreateCheckSuiteInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The SHA of the head commit.
+  """
+  headSha: GitObjectID!
+
+  """
+  The Node ID of the repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of CreateCheckSuite
+"""
+type CreateCheckSuitePayload {
+  """
+  The newly created check suite.
+  """
+  checkSuite: CheckSuite
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of CreateCommitOnBranch
+"""
+input CreateCommitOnBranchInput {
+  """
+  The Ref to be updated. Must be a branch.
+  """
+  branch: CommittableBranch!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The git commit oid expected at the head of the branch prior to the commit
+  """
+  expectedHeadOid: GitObjectID!
+
+  """
+  A description of changes to files in this commit.
+  """
+  fileChanges: FileChanges
+
+  """
+  The commit message to be included with the commit.
+  """
+  message: CommitMessage!
+}
+
+"""
+Autogenerated return type of CreateCommitOnBranch
+"""
+type CreateCommitOnBranchPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new commit.
+  """
+  commit: Commit
+
+  """
+  The ref which has been updated to point to the new commit.
+  """
+  ref: Ref
+}
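+
+# A hypothetical mutation sketch (comment only), assuming the conventional
+# `createCommitOnBranch` mutation field. `FileChanges` is defined elsewhere in
+# the schema; the OID and base64 file contents are placeholders.
+#
+#   mutation {
+#     createCommitOnBranch(input: {
+#       branch: {repositoryNameWithOwner: "octocat/hello-world", branchName: "main"}
+#       expectedHeadOid: "0000000000000000000000000000000000000000"
+#       message: {headline: "Update README", body: "Clarify install steps."}
+#       fileChanges: {additions: [{path: "README.md", contents: "aGVsbG8="}]}
+#     }) {
+#       commit { url }
+#       ref { name }
+#     }
+#   }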
+
+"""
+Autogenerated input type of CreateDeployment
+"""
+input CreateDeploymentInput @preview(toggledBy: "flash-preview") {
+  """
+  Attempt to automatically merge the default branch into the requested ref; defaults to true.
+  """
+  autoMerge: Boolean = true
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Short description of the deployment.
+  """
+  description: String = ""
+
+  """
+  Name for the target deployment environment.
+  """
+  environment: String = "production"
+
+  """
+  JSON payload with extra information about the deployment.
+  """
+  payload: String = "{}"
+
+  """
+  The node ID of the ref to be deployed.
+  """
+  refId: ID! @possibleTypes(concreteTypes: ["Ref"])
+
+  """
+  The node ID of the repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  The status contexts to verify against commit status checks. To bypass required
+  contexts, pass an empty array. Defaults to all unique contexts.
+  """
+  requiredContexts: [String!]
+
+  """
+  Specifies a task to execute.
+  """
+  task: String = "deploy"
+}
+
+"""
+Autogenerated return type of CreateDeployment
+"""
+type CreateDeploymentPayload @preview(toggledBy: "flash-preview") {
+  """
+  True if the default branch has been auto-merged into the deployment ref.
+  """
+  autoMerged: Boolean
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new deployment.
+  """
+  deployment: Deployment
+}
+
+"""
+Autogenerated input type of CreateDeploymentStatus
+"""
+input CreateDeploymentStatusInput @preview(toggledBy: "flash-preview") {
+  """
+  Adds a new inactive status to all non-transient, non-production environment
+  deployments with the same repository and environment name as the created
+  status's deployment.
+  """
+  autoInactive: Boolean = true
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The node ID of the deployment.
+  """
+  deploymentId: ID! @possibleTypes(concreteTypes: ["Deployment"])
+
+  """
+  A short description of the status. Maximum length of 140 characters.
+  """
+  description: String = ""
+
+  """
+  If provided, updates the environment of the deploy. Otherwise, does not modify the environment.
+  """
+  environment: String
+
+  """
+  Sets the URL for accessing your environment.
+  """
+  environmentUrl: String = ""
+
+  """
+  The log URL to associate with this status. This URL should contain output to
+  keep the user updated while the task is running or serve as historical
+  information for what happened in the deployment.
+  """
+  logUrl: String = ""
+
+  """
+  The state of the deployment.
+  """
+  state: DeploymentStatusState!
+}
+
+"""
+Autogenerated return type of CreateDeploymentStatus
+"""
+type CreateDeploymentStatusPayload @preview(toggledBy: "flash-preview") {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new deployment status.
+  """
+  deploymentStatus: DeploymentStatus
+}
+
+"""
+Autogenerated input type of CreateDiscussion
+"""
+input CreateDiscussionInput {
+  """
+  The body of the discussion.
+  """
+  body: String!
+
+  """
+  The id of the discussion category to associate with this discussion.
+  """
+  categoryId: ID! @possibleTypes(concreteTypes: ["DiscussionCategory"])
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The id of the repository on which to create the discussion.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  The title of the discussion.
+  """
+  title: String!
+}
+
+"""
+Autogenerated return type of CreateDiscussion
+"""
+type CreateDiscussionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The discussion that was just created.
+  """
+  discussion: Discussion
+}
+
+"""
+Autogenerated input type of CreateEnterpriseOrganization
+"""
+input CreateEnterpriseOrganizationInput {
+  """
+  The logins for the administrators of the new organization.
+  """
+  adminLogins: [String!]!
+
+  """
+  The email used for sending billing receipts.
+  """
+  billingEmail: String!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise owning the new organization.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The login of the new organization.
+  """
+  login: String!
+
+  """
+  The profile name of the new organization.
+  """
+  profileName: String!
+}
+
+"""
+Autogenerated return type of CreateEnterpriseOrganization
+"""
+type CreateEnterpriseOrganizationPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise that owns the created organization.
+  """
+  enterprise: Enterprise
+
+  """
+  The organization that was created.
+  """
+  organization: Organization
+}
+
+"""
+Autogenerated input type of CreateEnvironment
+"""
+input CreateEnvironmentInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The name of the environment.
+  """
+  name: String!
+
+  """
+  The node ID of the repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of CreateEnvironment
+"""
+type CreateEnvironmentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new or existing environment.
+  """
+  environment: Environment
+}
+
+"""
+Autogenerated input type of CreateIpAllowListEntry
+"""
+input CreateIpAllowListEntryInput {
+  """
+  An IP address or range of addresses in CIDR notation.
+  """
+  allowListValue: String!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Whether the IP allow list entry is active when an IP allow list is enabled.
+  """
+  isActive: Boolean!
+
+  """
+  An optional name for the IP allow list entry.
+  """
+  name: String
+
+  """
+  The ID of the owner for which to create the new IP allow list entry.
+  """
+  ownerId: ID! @possibleTypes(concreteTypes: ["App", "Enterprise", "Organization"], abstractType: "IpAllowListOwner")
+}
+
+"""
+Autogenerated return type of CreateIpAllowListEntry
+"""
+type CreateIpAllowListEntryPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The IP allow list entry that was created.
+  """
+  ipAllowListEntry: IpAllowListEntry
+}
+
+"""
+Autogenerated input type of CreateIssue
+"""
+input CreateIssueInput {
+  """
+  The Node IDs of the user assignees for this issue.
+  """
+  assigneeIds: [ID!] @possibleTypes(concreteTypes: ["User"])
+
+  """
+  The body for the issue description.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The name of an issue template in the repository; assigns labels and assignees from the template to the issue.
+  """
+  issueTemplate: String
+
+  """
+  An array of Node IDs of labels for this issue.
+  """
+  labelIds: [ID!] @possibleTypes(concreteTypes: ["Label"])
+
+  """
+  The Node ID of the milestone for this issue.
+  """
+  milestoneId: ID @possibleTypes(concreteTypes: ["Milestone"])
+
+  """
+  An array of Node IDs for projects associated with this issue.
+  """
+  projectIds: [ID!] @possibleTypes(concreteTypes: ["Project"])
+
+  """
+  The Node ID of the repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  The title for the issue.
+  """
+  title: String!
+}
+
+"""
+Autogenerated return type of CreateIssue
+"""
+type CreateIssuePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new issue.
+  """
+  issue: Issue
+}
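+
+# A hypothetical mutation sketch (comment only), assuming the conventional
+# `createIssue` mutation field for the types above. The repository ID is a
+# placeholder.
+#
+#   mutation {
+#     createIssue(input: {
+#       repositoryId: "R_kgExample"
+#       title: "Bug: crash on empty input"
+#       body: "Steps to reproduce go here."
+#     }) {
+#       issue { number url }
+#     }
+#   }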
+
+"""
+Autogenerated input type of CreateLabel
+"""
+input CreateLabelInput @preview(toggledBy: "bane-preview") {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A 6 character hex code, without the leading #, identifying the color of the label.
+  """
+  color: String!
+
+  """
+  A brief description of the label, such as its purpose.
+  """
+  description: String
+
+  """
+  The name of the label.
+  """
+  name: String!
+
+  """
+  The Node ID of the repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of CreateLabel
+"""
+type CreateLabelPayload @preview(toggledBy: "bane-preview") {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new label.
+  """
+  label: Label
+}
+
+"""
+Autogenerated input type of CreateLinkedBranch
+"""
+input CreateLinkedBranchInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the issue to link to.
+  """
+  issueId: ID! @possibleTypes(concreteTypes: ["Issue"])
+
+  """
+  The name of the new branch. Defaults to issue number and title.
+  """
+  name: String
+
+  """
+  The commit SHA to base the new branch on.
+  """
+  oid: GitObjectID!
+
+  """
+  ID of the repository to create the branch in. Defaults to the issue repository.
+  """
+  repositoryId: ID @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of CreateLinkedBranch
+"""
+type CreateLinkedBranchPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The issue that was linked to.
+  """
+  issue: Issue
+
+  """
+  The new branch issue reference.
+  """
+  linkedBranch: LinkedBranch
+}
+
+"""
+Autogenerated input type of CreateMigrationSource
+"""
+input CreateMigrationSourceInput {
+  """
+  The migration source access token.
+  """
+  accessToken: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The GitHub personal access token of the user importing to the target repository.
+  """
+  githubPat: String
+
+  """
+  The migration source name.
+  """
+  name: String!
+
+  """
+  The ID of the organization that will own the migration source.
+  """
+  ownerId: ID! @possibleTypes(concreteTypes: ["Organization"])
+
+  """
+  The migration source type.
+  """
+  type: MigrationSourceType!
+
+  """
+  The migration source URL, for example `https://github.com` or `https://monalisa.ghe.com`.
+  """
+  url: String
+}
+
+"""
+Autogenerated return type of CreateMigrationSource
+"""
+type CreateMigrationSourcePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The created migration source.
+  """
+  migrationSource: MigrationSource
+}
+
+"""
+Autogenerated input type of CreateProject
+"""
+input CreateProjectInput {
+  """
+  The description of the project.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The name of the project.
+  """
+  name: String!
+
+  """
+  The owner ID to create the project under.
+  """
+  ownerId: ID! @possibleTypes(concreteTypes: ["Organization", "Repository", "User"], abstractType: "ProjectOwner")
+
+  """
+  A list of repository IDs to create as linked repositories for the project
+  """
+  repositoryIds: [ID!] @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  The name of the GitHub-provided template.
+  """
+  template: ProjectTemplate
+}
+
+"""
+Autogenerated return type of CreateProject
+"""
+type CreateProjectPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new project.
+  """
+  project: Project
+}
+
+"""
+Autogenerated input type of CreateProjectV2Field
+"""
+input CreateProjectV2FieldInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The data type of the field.
+  """
+  dataType: ProjectV2CustomFieldType!
+
+  """
+  The name of the field.
+  """
+  name: String!
+
+  """
+  The ID of the Project to create the field in.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+
+  """
+  Options for a single select field. At least one value is required if data_type is SINGLE_SELECT
+  """
+  singleSelectOptions: [ProjectV2SingleSelectFieldOptionInput!]
+}
+
+"""
+Autogenerated return type of CreateProjectV2Field
+"""
+type CreateProjectV2FieldPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new field.
+  """
+  projectV2Field: ProjectV2FieldConfiguration
+}
+
+"""
+Autogenerated input type of CreateProjectV2
+"""
+input CreateProjectV2Input {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The owner ID to create the project under.
+  """
+  ownerId: ID! @possibleTypes(concreteTypes: ["Organization", "User"], abstractType: "OrganizationOrUser")
+
+  """
+  The repository to link the project to.
+  """
+  repositoryId: ID @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  The team to link the project to. The team will be granted read permissions.
+  """
+  teamId: ID @possibleTypes(concreteTypes: ["Team"])
+
+  """
+  The title of the project.
+  """
+  title: String!
+}
+
+"""
+Autogenerated return type of CreateProjectV2
+"""
+type CreateProjectV2Payload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new project.
+  """
+  projectV2: ProjectV2
+}
+
+"""
+Autogenerated input type of CreatePullRequest
+"""
+input CreatePullRequestInput {
+  """
+  The name of the branch you want your changes pulled into. This should be an existing branch
+  on the current repository. You cannot update the base branch on a pull request to point
+  to another repository.
+  """
+  baseRefName: String!
+
+  """
+  The contents of the pull request.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Indicates whether this pull request should be a draft.
+  """
+  draft: Boolean = false
+
+  """
+  The name of the branch where your changes are implemented. For cross-repository pull requests
+  in the same network, namespace `head_ref_name` with a user like this: `username:branch`.
+  """
+  headRefName: String!
+
+  """
+  The Node ID of the head repository.
+  """
+  headRepositoryId: ID @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  Indicates whether maintainers can modify the pull request.
+  """
+  maintainerCanModify: Boolean = true
+
+  """
+  The Node ID of the repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  The title of the pull request.
+  """
+  title: String!
+}
+
+"""
+Autogenerated return type of CreatePullRequest
+"""
+type CreatePullRequestPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new pull request.
+  """
+  pullRequest: PullRequest
+}
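+
+# A hypothetical mutation sketch (comment only), assuming the conventional
+# `createPullRequest` mutation field for the types above. The repository ID
+# and branch names are placeholders.
+#
+#   mutation {
+#     createPullRequest(input: {
+#       repositoryId: "R_kgExample"
+#       baseRefName: "main"
+#       headRefName: "feature-branch"
+#       title: "Add feature"
+#       draft: true
+#     }) {
+#       pullRequest { number url isDraft }
+#     }
+#   }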
+
+"""
+Autogenerated input type of CreateRef
+"""
+input CreateRefInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The fully qualified name of the new Ref (i.e. `refs/heads/my_new_branch`).
+  """
+  name: String!
+
+  """
+  The GitObjectID that the new Ref shall target. Must point to a commit.
+  """
+  oid: GitObjectID!
+
+  """
+  The Node ID of the Repository to create the Ref in.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of CreateRef
+"""
+type CreateRefPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The newly created ref.
+  """
+  ref: Ref
+}
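+
+# A hypothetical mutation sketch (comment only), assuming the conventional
+# `createRef` mutation field for the types above. The repository ID and target
+# OID are placeholders.
+#
+#   mutation {
+#     createRef(input: {
+#       repositoryId: "R_kgExample"
+#       name: "refs/heads/my_new_branch"
+#       oid: "0000000000000000000000000000000000000000"
+#     }) {
+#       ref { name target { oid } }
+#     }
+#   }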
+
+"""
+Autogenerated input type of CreateRepository
+"""
+input CreateRepositoryInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A short description of the new repository.
+  """
+  description: String
+
+  """
+  Indicates if the repository should have the issues feature enabled.
+  """
+  hasIssuesEnabled: Boolean = true
+
+  """
+  Indicates if the repository should have the wiki feature enabled.
+  """
+  hasWikiEnabled: Boolean = false
+
+  """
+  The URL for a web page about this repository.
+  """
+  homepageUrl: URI
+
+  """
+  The name of the new repository.
+  """
+  name: String!
+
+  """
+  The ID of the owner for the new repository.
+  """
+  ownerId: ID @possibleTypes(concreteTypes: ["Organization", "User"], abstractType: "RepositoryOwner")
+
+  """
+  When an organization is specified as the owner, this ID identifies the team
+  that should be granted access to the new repository.
+  """
+  teamId: ID @possibleTypes(concreteTypes: ["Team"])
+
+  """
+  Whether this repository should be marked as a template such that anyone who
+  can access it can create new repositories with the same files and directory structure.
+  """
+  template: Boolean = false
+
+  """
+  Indicates the repository's visibility level.
+  """
+  visibility: RepositoryVisibility!
+}
+
+"""
+Autogenerated return type of CreateRepository
+"""
+type CreateRepositoryPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new repository.
+  """
+  repository: Repository
+}
+
+"""
+Autogenerated input type of CreateRepositoryRuleset
+"""
+input CreateRepositoryRulesetInput {
+  """
+  A list of actors that are allowed to bypass rules in this ruleset.
+  """
+  bypassActors: [RepositoryRulesetBypassActorInput!]
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The set of conditions for this ruleset
+  """
+  conditions: RepositoryRuleConditionsInput!
+
+  """
+  The enforcement level for this ruleset
+  """
+  enforcement: RuleEnforcement!
+
+  """
+  The name of the ruleset.
+  """
+  name: String!
+
+  """
+  The list of rules for this ruleset
+  """
+  rules: [RepositoryRuleInput!]
+
+  """
+  The global relay id of the source in which a new ruleset should be created.
+  """
+  sourceId: ID! @possibleTypes(concreteTypes: ["Organization", "Repository"], abstractType: "RuleSource")
+
+  """
+  The target of the ruleset.
+  """
+  target: RepositoryRulesetTarget
+}
+
+"""
+Autogenerated return type of CreateRepositoryRuleset
+"""
+type CreateRepositoryRulesetPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The newly created Ruleset.
+  """
+  ruleset: RepositoryRuleset
+}
+
+"""
+Autogenerated input type of CreateSponsorsListing
+"""
+input CreateSponsorsListingInput {
+  """
+  The country or region where the sponsorable's bank account is located.
+  Required if fiscalHostLogin is not specified; ignored when fiscalHostLogin is specified.
+  """
+  billingCountryOrRegionCode: SponsorsCountryOrRegionCode
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The email address we should use to contact you about the GitHub Sponsors
+  profile being created. This will not be shared publicly. Must be a verified
+  email address already on your GitHub account. Only relevant when the
+  sponsorable is yourself. Defaults to your primary email address on file if omitted.
+  """
+  contactEmail: String
+
+  """
+  The username of the supported fiscal host's GitHub organization, if you want
+  to receive sponsorship payouts through a fiscal host rather than directly to a
+  bank account. For example, 'Open-Source-Collective' for Open Source Collective
+  or 'numfocus' for numFOCUS. Case insensitive. See https://docs.github.com/sponsors/receiving-sponsorships-through-github-sponsors/using-a-fiscal-host-to-receive-github-sponsors-payouts
+  for more information.
+  """
+  fiscalHostLogin: String
+
+  """
+  The URL for your profile page on the fiscal host's website, e.g.,
+  https://opencollective.com/babel or https://numfocus.org/project/bokeh.
+  Required if fiscalHostLogin is specified.
+  """
+  fiscallyHostedProjectProfileUrl: String
+
+  """
+  Provide an introduction to serve as the main focus that appears on your GitHub
+  Sponsors profile. It's a great opportunity to help potential sponsors learn
+  more about you, your work, and why their sponsorship is important to you.
+  GitHub-flavored Markdown is supported.
+  """
+  fullDescription: String
+
+  """
+  The country or region where the sponsorable resides. This is for tax purposes.
+  Required if the sponsorable is yourself; ignored when sponsorableLogin
+  specifies an organization.
+  """
+  residenceCountryOrRegionCode: SponsorsCountryOrRegionCode
+
+  """
+  The username of the organization to create a GitHub Sponsors profile for, if
+  desired. Defaults to creating a GitHub Sponsors profile for the authenticated
+  user if omitted.
+  """
+  sponsorableLogin: String
+}
+
+"""
+Autogenerated return type of CreateSponsorsListing
+"""
+type CreateSponsorsListingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new GitHub Sponsors profile.
+  """
+  sponsorsListing: SponsorsListing
+}
+
+"""
+Autogenerated input type of CreateSponsorsTier
+"""
+input CreateSponsorsTierInput {
+  """
+  The value of the new tier in US dollars. Valid values: 1-12000.
+  """
+  amount: Int!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A description of what this tier is, what perks sponsors might receive, what a sponsorship at this tier means for you, etc.
+  """
+  description: String!
+
+  """
+  Whether sponsorships using this tier should happen monthly/yearly or just once.
+  """
+  isRecurring: Boolean = true
+
+  """
+  Whether to make the tier available immediately for sponsors to choose.
+  Defaults to creating a draft tier that will not be publicly visible.
+  """
+  publish: Boolean = false
+
+  """
+  Optional ID of the private repository that sponsors at this tier should gain
+  read-only access to. Must be owned by an organization.
+  """
+  repositoryId: ID @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  Optional name of the private repository that sponsors at this tier should gain
+  read-only access to. Must be owned by an organization. Necessary if
+  repositoryOwnerLogin is given. Will be ignored if repositoryId is given.
+  """
+  repositoryName: String
+
+  """
+  Optional login of the organization owner of the private repository that
+  sponsors at this tier should gain read-only access to. Necessary if
+  repositoryName is given. Will be ignored if repositoryId is given.
+  """
+  repositoryOwnerLogin: String
+
+  """
+  The ID of the user or organization who owns the GitHub Sponsors profile.
+  Defaults to the current user if omitted and sponsorableLogin is not given.
+  """
+  sponsorableId: ID @possibleTypes(concreteTypes: ["Organization", "User"], abstractType: "Sponsorable")
+
+  """
+  The username of the user or organization who owns the GitHub Sponsors profile.
+  Defaults to the current user if omitted and sponsorableId is not given.
+  """
+  sponsorableLogin: String
+
+  """
+  Optional message new sponsors at this tier will receive.
+  """
+  welcomeMessage: String
+}
+
+"""
+Autogenerated return type of CreateSponsorsTier
+"""
+type CreateSponsorsTierPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new tier.
+  """
+  sponsorsTier: SponsorsTier
+}
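+
+# Usage sketch (comments only): assuming the conventional `createSponsorsTier`
+# mutation field, this creates a $5/month tier that stays a draft, since
+# `publish` defaults to false above.
+#
+#   mutation {
+#     createSponsorsTier(input: {amount: 5, description: "Monthly supporter"}) {
+#       sponsorsTier {
+#         id
+#       }
+#     }
+#   }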
+
+"""
+Autogenerated input type of CreateSponsorship
+"""
+input CreateSponsorshipInput {
+  """
+  The amount to pay to the sponsorable in US dollars. Required if a tierId is not specified. Valid values: 1-12000.
+  """
+  amount: Int
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Whether the sponsorship should happen monthly/yearly or just this one time. Required if a tierId is not specified.
+  """
+  isRecurring: Boolean
+
+  """
+  Specify whether others should be able to see that the sponsor is sponsoring
+  the sponsorable. Public visibility still does not reveal which tier is used.
+  """
+  privacyLevel: SponsorshipPrivacy = PUBLIC
+
+  """
+  Whether the sponsor should receive email updates from the sponsorable.
+  """
+  receiveEmails: Boolean = true
+
+  """
+  The ID of the user or organization who is acting as the sponsor, paying for
+  the sponsorship. Required if sponsorLogin is not given.
+  """
+  sponsorId: ID @possibleTypes(concreteTypes: ["Organization", "User"], abstractType: "Sponsor")
+
+  """
+  The username of the user or organization who is acting as the sponsor, paying
+  for the sponsorship. Required if sponsorId is not given.
+  """
+  sponsorLogin: String
+
+  """
+  The ID of the user or organization who is receiving the sponsorship. Required if sponsorableLogin is not given.
+  """
+  sponsorableId: ID @possibleTypes(concreteTypes: ["Organization", "User"], abstractType: "Sponsorable")
+
+  """
+  The username of the user or organization who is receiving the sponsorship. Required if sponsorableId is not given.
+  """
+  sponsorableLogin: String
+
+  """
+  The ID of one of sponsorable's existing tiers to sponsor at. Required if amount is not specified.
+  """
+  tierId: ID @possibleTypes(concreteTypes: ["SponsorsTier"])
+}
+
+"""
+Autogenerated return type of CreateSponsorship
+"""
+type CreateSponsorshipPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The sponsorship that was started.
+  """
+  sponsorship: Sponsorship
+}
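+
+# Usage sketch (comments only): per the docstrings above, provide either
+# `tierId` or both `amount` and `isRecurring`, plus one of each ID/login pair
+# for the sponsor and the sponsorable. Assuming a `createSponsorship` mutation
+# field; the logins are hypothetical placeholders.
+#
+#   mutation {
+#     createSponsorship(
+#       input: {
+#         sponsorLogin: "octocat"
+#         sponsorableLogin: "octo-maintainer"
+#         amount: 10
+#         isRecurring: true
+#       }
+#     ) {
+#       sponsorship {
+#         id
+#       }
+#     }
+#   }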
+
+"""
+Autogenerated input type of CreateSponsorships
+"""
+input CreateSponsorshipsInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Specify whether others should be able to see that the sponsor is sponsoring
+  the sponsorables. Public visibility still does not reveal the dollar value of
+  the sponsorship.
+  """
+  privacyLevel: SponsorshipPrivacy = PUBLIC
+
+  """
+  Whether the sponsor should receive email updates from the sponsorables.
+  """
+  receiveEmails: Boolean = false
+
+  """
+  The username of the user or organization who is acting as the sponsor, paying for the sponsorships.
+  """
+  sponsorLogin: String!
+
+  """
+  The list of maintainers to sponsor and how much to sponsor each.
+  """
+  sponsorships: [BulkSponsorship!]!
+}
+
+"""
+Autogenerated return type of CreateSponsorships
+"""
+type CreateSponsorshipsPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The users and organizations who received a sponsorship.
+  """
+  sponsorables: [Sponsorable!]
+}
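+
+# Usage sketch (comments only): the bulk variant takes a list of
+# `BulkSponsorship` inputs (defined elsewhere in this schema; field names
+# assumed here). Assuming a `createSponsorships` mutation field and a
+# hypothetical sponsorable ID:
+#
+#   mutation {
+#     createSponsorships(
+#       input: {
+#         sponsorLogin: "octo-org"
+#         sponsorships: [{sponsorableId: "U_example", amount: 5}]
+#       }
+#     ) {
+#       sponsorables {
+#         __typename
+#       }
+#     }
+#   }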
+
+"""
+Autogenerated input type of CreateTeamDiscussionComment
+"""
+input CreateTeamDiscussionCommentInput {
+  """
+  The content of the comment. This field is required.
+
+  **Upcoming Change on 2024-07-01 UTC**
+  **Description:** `body` will be removed. Follow the guide at
+  https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to
+  find a suitable replacement.
+  **Reason:** The Team Discussions feature is deprecated in favor of Organization Discussions.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the discussion to which the comment belongs. This field is required.
+
+  **Upcoming Change on 2024-07-01 UTC**
+  **Description:** `discussionId` will be removed. Follow the guide at
+  https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to
+  find a suitable replacement.
+  **Reason:** The Team Discussions feature is deprecated in favor of Organization Discussions.
+  """
+  discussionId: ID @possibleTypes(concreteTypes: ["TeamDiscussion"])
+}
+
+"""
+Autogenerated return type of CreateTeamDiscussionComment
+"""
+type CreateTeamDiscussionCommentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new comment.
+  """
+  teamDiscussionComment: TeamDiscussionComment
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+}
+
+"""
+Autogenerated input type of CreateTeamDiscussion
+"""
+input CreateTeamDiscussionInput {
+  """
+  The content of the discussion. This field is required.
+
+  **Upcoming Change on 2024-07-01 UTC**
+  **Description:** `body` will be removed. Follow the guide at
+  https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to
+  find a suitable replacement.
+  **Reason:** The Team Discussions feature is deprecated in favor of Organization Discussions.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  If true, restricts the visibility of this discussion to team members and
+  organization owners. If false or not specified, allows any organization member
+  to view this discussion.
+
+  **Upcoming Change on 2024-07-01 UTC**
+  **Description:** `private` will be removed. Follow the guide at
+  https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to
+  find a suitable replacement.
+  **Reason:** The Team Discussions feature is deprecated in favor of Organization Discussions.
+  """
+  private: Boolean
+
+  """
+  The ID of the team to which the discussion belongs. This field is required.
+
+  **Upcoming Change on 2024-07-01 UTC**
+  **Description:** `teamId` will be removed. Follow the guide at
+  https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to
+  find a suitable replacement.
+  **Reason:** The Team Discussions feature is deprecated in favor of Organization Discussions.
+  """
+  teamId: ID @possibleTypes(concreteTypes: ["Team"])
+
+  """
+  The title of the discussion. This field is required.
+
+  **Upcoming Change on 2024-07-01 UTC**
+  **Description:** `title` will be removed. Follow the guide at
+  https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to
+  find a suitable replacement.
+  **Reason:** The Team Discussions feature is deprecated in favor of Organization Discussions.
+  """
+  title: String
+}
+
+"""
+Autogenerated return type of CreateTeamDiscussion
+"""
+type CreateTeamDiscussionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new discussion.
+  """
+  teamDiscussion: TeamDiscussion
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+}
+
+"""
+Represents the contribution a user made by committing to a repository.
+"""
+type CreatedCommitContribution implements Contribution {
+  """
+  How many commits were made on this day to this repository by the user.
+  """
+  commitCount: Int!
+
+  """
+  Whether this contribution is associated with a record you do not have access to. For
+  example, your own 'first issue' contribution may have been made on a repository you can no
+  longer access.
+  """
+  isRestricted: Boolean!
+
+  """
+  When this contribution was made.
+  """
+  occurredAt: DateTime!
+
+  """
+  The repository the user made a commit in.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path for this contribution.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for this contribution.
+  """
+  url: URI!
+
+  """
+  The user who made this contribution.
+  """
+  user: User!
+}
+
+"""
+The connection type for CreatedCommitContribution.
+"""
+type CreatedCommitContributionConnection {
+  """
+  A list of edges.
+  """
+  edges: [CreatedCommitContributionEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [CreatedCommitContribution]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of commits across days and repositories in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type CreatedCommitContributionEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: CreatedCommitContribution
+}
+
+"""
+Represents the contribution a user made on GitHub by opening an issue.
+"""
+type CreatedIssueContribution implements Contribution {
+  """
+  Whether this contribution is associated with a record you do not have access to. For
+  example, your own 'first issue' contribution may have been made on a repository you can no
+  longer access.
+  """
+  isRestricted: Boolean!
+
+  """
+  The issue that was opened.
+  """
+  issue: Issue!
+
+  """
+  When this contribution was made.
+  """
+  occurredAt: DateTime!
+
+  """
+  The HTTP path for this contribution.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for this contribution.
+  """
+  url: URI!
+
+  """
+  The user who made this contribution.
+  """
+  user: User!
+}
+
+"""
+The connection type for CreatedIssueContribution.
+"""
+type CreatedIssueContributionConnection {
+  """
+  A list of edges.
+  """
+  edges: [CreatedIssueContributionEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [CreatedIssueContribution]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type CreatedIssueContributionEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: CreatedIssueContribution
+}
+
+"""
+Represents either an issue the viewer can access or a restricted contribution.
+"""
+union CreatedIssueOrRestrictedContribution = CreatedIssueContribution | RestrictedContribution
+
+"""
+Represents the contribution a user made on GitHub by opening a pull request.
+"""
+type CreatedPullRequestContribution implements Contribution {
+  """
+  Whether this contribution is associated with a record you do not have access to. For
+  example, your own 'first issue' contribution may have been made on a repository you can no
+  longer access.
+  """
+  isRestricted: Boolean!
+
+  """
+  When this contribution was made.
+  """
+  occurredAt: DateTime!
+
+  """
+  The pull request that was opened.
+  """
+  pullRequest: PullRequest!
+
+  """
+  The HTTP path for this contribution.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for this contribution.
+  """
+  url: URI!
+
+  """
+  The user who made this contribution.
+  """
+  user: User!
+}
+
+"""
+The connection type for CreatedPullRequestContribution.
+"""
+type CreatedPullRequestContributionConnection {
+  """
+  A list of edges.
+  """
+  edges: [CreatedPullRequestContributionEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [CreatedPullRequestContribution]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type CreatedPullRequestContributionEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: CreatedPullRequestContribution
+}
+
+"""
+Represents either a pull request the viewer can access or a restricted contribution.
+"""
+union CreatedPullRequestOrRestrictedContribution = CreatedPullRequestContribution | RestrictedContribution
+
+"""
+Represents the contribution a user made by leaving a review on a pull request.
+"""
+type CreatedPullRequestReviewContribution implements Contribution {
+  """
+  Whether this contribution is associated with a record you do not have access to. For
+  example, your own 'first issue' contribution may have been made on a repository you can no
+  longer access.
+  """
+  isRestricted: Boolean!
+
+  """
+  When this contribution was made.
+  """
+  occurredAt: DateTime!
+
+  """
+  The pull request the user reviewed.
+  """
+  pullRequest: PullRequest!
+
+  """
+  The review the user left on the pull request.
+  """
+  pullRequestReview: PullRequestReview!
+
+  """
+  The repository containing the pull request that the user reviewed.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path for this contribution.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for this contribution.
+  """
+  url: URI!
+
+  """
+  The user who made this contribution.
+  """
+  user: User!
+}
+
+"""
+The connection type for CreatedPullRequestReviewContribution.
+"""
+type CreatedPullRequestReviewContributionConnection {
+  """
+  A list of edges.
+  """
+  edges: [CreatedPullRequestReviewContributionEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [CreatedPullRequestReviewContribution]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type CreatedPullRequestReviewContributionEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: CreatedPullRequestReviewContribution
+}
+
+"""
+Represents the contribution a user made on GitHub by creating a repository.
+"""
+type CreatedRepositoryContribution implements Contribution {
+  """
+  Whether this contribution is associated with a record you do not have access to. For
+  example, your own 'first issue' contribution may have been made on a repository you can no
+  longer access.
+  """
+  isRestricted: Boolean!
+
+  """
+  When this contribution was made.
+  """
+  occurredAt: DateTime!
+
+  """
+  The repository that was created.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path for this contribution.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for this contribution.
+  """
+  url: URI!
+
+  """
+  The user who made this contribution.
+  """
+  user: User!
+}
+
+"""
+The connection type for CreatedRepositoryContribution.
+"""
+type CreatedRepositoryContributionConnection {
+  """
+  A list of edges.
+  """
+  edges: [CreatedRepositoryContributionEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [CreatedRepositoryContribution]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type CreatedRepositoryContributionEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: CreatedRepositoryContribution
+}
+
+"""
+Represents either a repository the viewer can access or a restricted contribution.
+"""
+union CreatedRepositoryOrRestrictedContribution = CreatedRepositoryContribution | RestrictedContribution
+
+"""
+Represents a mention made by one issue or pull request to another.
+"""
+type CrossReferencedEvent implements Node & UniformResourceLocatable {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the CrossReferencedEvent object
+  """
+  id: ID!
+
+  """
+  Reference originated in a different repository.
+  """
+  isCrossRepository: Boolean!
+
+  """
+  Identifies when the reference was made.
+  """
+  referencedAt: DateTime!
+
+  """
+  The HTTP path for this pull request.
+  """
+  resourcePath: URI!
+
+  """
+  Issue or pull request that made the reference.
+  """
+  source: ReferencedSubject!
+
+  """
+  Issue or pull request to which the reference was made.
+  """
+  target: ReferencedSubject!
+
+  """
+  The HTTP URL for this pull request.
+  """
+  url: URI!
+
+  """
+  Checks if the target will be closed when the source is merged.
+  """
+  willCloseTarget: Boolean!
+}
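+
+# Usage sketch (comments only): timeline events like this one are selected via
+# inline fragments when paging an issue's or pull request's timeline
+# connection (defined elsewhere in this schema), e.g.:
+#
+#   ... on CrossReferencedEvent {
+#     isCrossRepository
+#     willCloseTarget
+#     source {
+#       ... on Issue { number }
+#       ... on PullRequest { number }
+#     }
+#   }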
+
+"""
+An ISO-8601 encoded date string.
+"""
+scalar Date
+
+"""
+An ISO-8601 encoded UTC date string.
+"""
+scalar DateTime
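+
+# Example values (comments only): a `Date` is an ISO-8601 date such as
+# "2011-04-10"; a `DateTime` is an ISO-8601 UTC timestamp such as
+# "2011-04-10T20:09:31Z".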
+
+"""
+Autogenerated input type of DeclineTopicSuggestion
+"""
+input DeclineTopicSuggestionInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The name of the suggested topic.
+  """
+  name: String!
+
+  """
+  The reason why the suggested topic is declined.
+  """
+  reason: TopicSuggestionDeclineReason!
+
+  """
+  The Node ID of the repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of DeclineTopicSuggestion
+"""
+type DeclineTopicSuggestionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The declined topic.
+  """
+  topic: Topic
+}
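+
+# Usage sketch (comments only): `reason` takes a TopicSuggestionDeclineReason
+# value defined elsewhere in this schema (NOT_RELEVANT is one of its values).
+# Assuming a `declineTopicSuggestion` mutation field and a hypothetical
+# repository ID:
+#
+#   mutation {
+#     declineTopicSuggestion(
+#       input: {repositoryId: "R_example", name: "sample-topic", reason: NOT_RELEVANT}
+#     ) {
+#       topic {
+#         name
+#       }
+#     }
+#   }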
+
+"""
+The possible base permissions for repositories.
+"""
+enum DefaultRepositoryPermissionField {
+  """
+  Can read, write, and administrate repos by default
+  """
+  ADMIN
+
+  """
+  No access
+  """
+  NONE
+
+  """
+  Can read repos by default
+  """
+  READ
+
+  """
+  Can read and write repos by default
+  """
+  WRITE
+}
+
+"""
+Entities that can be deleted.
+"""
+interface Deletable {
+  """
+  Check if the current viewer can delete this object.
+  """
+  viewerCanDelete: Boolean!
+}
+
+"""
+Autogenerated input type of DeleteBranchProtectionRule
+"""
+input DeleteBranchProtectionRuleInput {
+  """
+  The global relay id of the branch protection rule to be deleted.
+  """
+  branchProtectionRuleId: ID! @possibleTypes(concreteTypes: ["BranchProtectionRule"])
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated return type of DeleteBranchProtectionRule
+"""
+type DeleteBranchProtectionRulePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of DeleteDeployment
+"""
+input DeleteDeploymentInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the deployment to be deleted.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["Deployment"])
+}
+
+"""
+Autogenerated return type of DeleteDeployment
+"""
+type DeleteDeploymentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of DeleteDiscussionComment
+"""
+input DeleteDiscussionCommentInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node id of the discussion comment to delete.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["DiscussionComment"])
+}
+
+"""
+Autogenerated return type of DeleteDiscussionComment
+"""
+type DeleteDiscussionCommentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The discussion comment that was just deleted.
+  """
+  comment: DiscussionComment
+}
+
+"""
+Autogenerated input type of DeleteDiscussion
+"""
+input DeleteDiscussionInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The id of the discussion to delete.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["Discussion"])
+}
+
+"""
+Autogenerated return type of DeleteDiscussion
+"""
+type DeleteDiscussionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The discussion that was just deleted.
+  """
+  discussion: Discussion
+}
+
+"""
+Autogenerated input type of DeleteEnvironment
+"""
+input DeleteEnvironmentInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the environment to be deleted.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["Environment"])
+}
+
+"""
+Autogenerated return type of DeleteEnvironment
+"""
+type DeleteEnvironmentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of DeleteIpAllowListEntry
+"""
+input DeleteIpAllowListEntryInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the IP allow list entry to delete.
+  """
+  ipAllowListEntryId: ID! @possibleTypes(concreteTypes: ["IpAllowListEntry"])
+}
+
+"""
+Autogenerated return type of DeleteIpAllowListEntry
+"""
+type DeleteIpAllowListEntryPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The IP allow list entry that was deleted.
+  """
+  ipAllowListEntry: IpAllowListEntry
+}
+
+"""
+Autogenerated input type of DeleteIssueComment
+"""
+input DeleteIssueCommentInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the comment to delete.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["IssueComment"])
+}
+
+"""
+Autogenerated return type of DeleteIssueComment
+"""
+type DeleteIssueCommentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of DeleteIssue
+"""
+input DeleteIssueInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the issue to delete.
+  """
+  issueId: ID! @possibleTypes(concreteTypes: ["Issue"])
+}
+
+"""
+Autogenerated return type of DeleteIssue
+"""
+type DeleteIssuePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The repository the issue belonged to
+  """
+  repository: Repository
+}
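+
+# Usage sketch (comments only), assuming the conventional `deleteIssue`
+# mutation field; the issue ID is a hypothetical placeholder:
+#
+#   mutation {
+#     deleteIssue(input: {issueId: "I_example"}) {
+#       repository {
+#         nameWithOwner
+#       }
+#     }
+#   }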
+
+"""
+Autogenerated input type of DeleteLabel
+"""
+input DeleteLabelInput @preview(toggledBy: "bane-preview") {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the label to be deleted.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["Label"])
+}
+
+"""
+Autogenerated return type of DeleteLabel
+"""
+type DeleteLabelPayload @preview(toggledBy: "bane-preview") {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of DeleteLinkedBranch
+"""
+input DeleteLinkedBranchInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the linked branch
+  """
+  linkedBranchId: ID! @possibleTypes(concreteTypes: ["LinkedBranch"])
+}
+
+"""
+Autogenerated return type of DeleteLinkedBranch
+"""
+type DeleteLinkedBranchPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The issue the linked branch was unlinked from.
+  """
+  issue: Issue
+}
+
+"""
+Autogenerated input type of DeletePackageVersion
+"""
+input DeletePackageVersionInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the package version to be deleted.
+  """
+  packageVersionId: ID! @possibleTypes(concreteTypes: ["PackageVersion"])
+}
+
+"""
+Autogenerated return type of DeletePackageVersion
+"""
+type DeletePackageVersionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Whether or not the operation succeeded.
+  """
+  success: Boolean
+}
+
+"""
+Autogenerated input type of DeleteProjectCard
+"""
+input DeleteProjectCardInput {
+  """
+  The id of the card to delete.
+  """
+  cardId: ID! @possibleTypes(concreteTypes: ["ProjectCard"])
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated return type of DeleteProjectCard
+"""
+type DeleteProjectCardPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The column the deleted card was in.
+  """
+  column: ProjectColumn
+
+  """
+  The deleted card ID.
+  """
+  deletedCardId: ID
+}
+
+"""
+Autogenerated input type of DeleteProjectColumn
+"""
+input DeleteProjectColumnInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The id of the column to delete.
+  """
+  columnId: ID! @possibleTypes(concreteTypes: ["ProjectColumn"])
+}
+
+"""
+Autogenerated return type of DeleteProjectColumn
+"""
+type DeleteProjectColumnPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The deleted column ID.
+  """
+  deletedColumnId: ID
+
+  """
+  The project the deleted column was in.
+  """
+  project: Project
+}
+
+"""
+Autogenerated input type of DeleteProject
+"""
+input DeleteProjectInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Project ID to delete.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["Project"])
+}
+
+"""
+Autogenerated return type of DeleteProject
+"""
+type DeleteProjectPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The repository or organization the project was removed from.
+  """
+  owner: ProjectOwner
+}
+
+"""
+Autogenerated input type of DeleteProjectV2Field
+"""
+input DeleteProjectV2FieldInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the field to delete.
+  """
+  fieldId: ID!
+    @possibleTypes(
+      concreteTypes: ["ProjectV2Field", "ProjectV2IterationField", "ProjectV2SingleSelectField"]
+      abstractType: "ProjectV2FieldConfiguration"
+    )
+}
+
+"""
+Autogenerated return type of DeleteProjectV2Field
+"""
+type DeleteProjectV2FieldPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The deleted field.
+  """
+  projectV2Field: ProjectV2FieldConfiguration
+}
+
+"""
+Autogenerated input type of DeleteProjectV2
+"""
+input DeleteProjectV2Input {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the Project to delete.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+}
+
+"""
+Autogenerated input type of DeleteProjectV2Item
+"""
+input DeleteProjectV2ItemInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the item to be removed.
+  """
+  itemId: ID! @possibleTypes(concreteTypes: ["ProjectV2Item"])
+
+  """
+  The ID of the Project from which the item should be removed.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+}
+
+"""
+Autogenerated return type of DeleteProjectV2Item
+"""
+type DeleteProjectV2ItemPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the deleted item.
+  """
+  deletedItemId: ID
+}
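+
+# Usage sketch (comments only), assuming a `deleteProjectV2Item` mutation
+# field; both IDs are hypothetical placeholders. The payload echoes back the
+# ID of the removed item:
+#
+#   mutation {
+#     deleteProjectV2Item(input: {projectId: "PVT_example", itemId: "PVTI_example"}) {
+#       deletedItemId
+#     }
+#   }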
+
+"""
+Autogenerated return type of DeleteProjectV2
+"""
+type DeleteProjectV2Payload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The deleted Project.
+  """
+  projectV2: ProjectV2
+}
+
+"""
+Autogenerated input type of DeleteProjectV2Workflow
+"""
+input DeleteProjectV2WorkflowInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the workflow to be removed.
+  """
+  workflowId: ID! @possibleTypes(concreteTypes: ["ProjectV2Workflow"])
+}
+
+"""
+Autogenerated return type of DeleteProjectV2Workflow
+"""
+type DeleteProjectV2WorkflowPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the deleted workflow.
+  """
+  deletedWorkflowId: ID
+
+  """
+  The project the deleted workflow was in.
+  """
+  projectV2: ProjectV2
+}
+
+"""
+Autogenerated input type of DeletePullRequestReviewComment
+"""
+input DeletePullRequestReviewCommentInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the comment to delete.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["PullRequestReviewComment"])
+}
+
+"""
+Autogenerated return type of DeletePullRequestReviewComment
+"""
+type DeletePullRequestReviewCommentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The pull request review the deleted comment belonged to.
+  """
+  pullRequestReview: PullRequestReview
+
+  """
+  The deleted pull request review comment.
+  """
+  pullRequestReviewComment: PullRequestReviewComment
+}
+
+"""
+Autogenerated input type of DeletePullRequestReview
+"""
+input DeletePullRequestReviewInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the pull request review to delete.
+  """
+  pullRequestReviewId: ID! @possibleTypes(concreteTypes: ["PullRequestReview"])
+}
+
+"""
+Autogenerated return type of DeletePullRequestReview
+"""
+type DeletePullRequestReviewPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The deleted pull request review.
+  """
+  pullRequestReview: PullRequestReview
+}
+
+"""
+Autogenerated input type of DeleteRef
+"""
+input DeleteRefInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the Ref to be deleted.
+  """
+  refId: ID! @possibleTypes(concreteTypes: ["Ref"])
+}
+
+"""
+Autogenerated return type of DeleteRef
+"""
+type DeleteRefPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of DeleteRepositoryRuleset
+"""
+input DeleteRepositoryRulesetInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The global relay id of the repository ruleset to be deleted.
+  """
+  repositoryRulesetId: ID! @possibleTypes(concreteTypes: ["RepositoryRuleset"])
+}
+
+"""
+Autogenerated return type of DeleteRepositoryRuleset
+"""
+type DeleteRepositoryRulesetPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of DeleteTeamDiscussionComment
+"""
+input DeleteTeamDiscussionCommentInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the comment to delete.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["TeamDiscussionComment"])
+}
+
+"""
+Autogenerated return type of DeleteTeamDiscussionComment
+"""
+type DeleteTeamDiscussionCommentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of DeleteTeamDiscussion
+"""
+input DeleteTeamDiscussionInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The discussion ID to delete.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["TeamDiscussion"])
+}
+
+"""
+Autogenerated return type of DeleteTeamDiscussion
+"""
+type DeleteTeamDiscussionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of DeleteVerifiableDomain
+"""
+input DeleteVerifiableDomainInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the verifiable domain to delete.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["VerifiableDomain"])
+}
+
+"""
+Autogenerated return type of DeleteVerifiableDomain
+"""
+type DeleteVerifiableDomainPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The owning account from which the domain was deleted.
+  """
+  owner: VerifiableDomainOwner
+}
+
+"""
+Represents a 'demilestoned' event on a given issue or pull request.
+"""
+type DemilestonedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the DemilestonedEvent object
+  """
+  id: ID!
+
+  """
+  Identifies the milestone title associated with the 'demilestoned' event.
+  """
+  milestoneTitle: String!
+
+  """
+  Object referenced by event.
+  """
+  subject: MilestoneItem!
+}
+
+"""
+A Dependabot Update for a dependency in a repository
+"""
+type DependabotUpdate implements RepositoryNode {
+  """
+  The error from a dependency update
+  """
+  error: DependabotUpdateError
+
+  """
+  The associated pull request
+  """
+  pullRequest: PullRequest
+
+  """
+  The repository associated with this node.
+  """
+  repository: Repository!
+}
+
+"""
+An error produced from a Dependabot Update
+"""
+type DependabotUpdateError {
+  """
+  The body of the error
+  """
+  body: String!
+
+  """
+  The error code
+  """
+  errorType: String!
+
+  """
+  The title of the error
+  """
+  title: String!
+}
+
+"""
+A dependency manifest entry
+"""
+type DependencyGraphDependency @preview(toggledBy: "hawkgirl-preview") {
+  """
+  Does the dependency itself have dependencies?
+  """
+  hasDependencies: Boolean!
+
+  """
+  The original name of the package, as it appears in the manifest.
+  """
+  packageLabel: String!
+    @deprecated(
+      reason: "`packageLabel` will be removed. Use normalized `packageName` field instead. Removal on 2022-10-01 UTC."
+    )
+
+  """
+  The dependency package manager
+  """
+  packageManager: String
+
+  """
+  The name of the package in the canonical form used by the package manager.
+  """
+  packageName: String!
+
+  """
+  The repository containing the package
+  """
+  repository: Repository
+
+  """
+  The dependency version requirements
+  """
+  requirements: String!
+}
+
+"""
+The connection type for DependencyGraphDependency.
+"""
+type DependencyGraphDependencyConnection @preview(toggledBy: "hawkgirl-preview") {
+  """
+  A list of edges.
+  """
+  edges: [DependencyGraphDependencyEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [DependencyGraphDependency]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type DependencyGraphDependencyEdge @preview(toggledBy: "hawkgirl-preview") {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: DependencyGraphDependency
+}
+
+"""
+The possible ecosystems of a dependency graph package.
+"""
+enum DependencyGraphEcosystem {
+  """
+  GitHub Actions
+  """
+  ACTIONS
+
+  """
+  PHP packages hosted at packagist.org
+  """
+  COMPOSER
+
+  """
+  Go modules
+  """
+  GO
+
+  """
+  Java artifacts hosted at the Maven central repository
+  """
+  MAVEN
+
+  """
+  JavaScript packages hosted at npmjs.com
+  """
+  NPM
+
+  """
+  .NET packages hosted at the NuGet Gallery
+  """
+  NUGET
+
+  """
+  Python packages hosted at PyPI.org
+  """
+  PIP
+
+  """
+  Dart packages hosted at pub.dev
+  """
+  PUB
+
+  """
+  Ruby gems hosted at RubyGems.org
+  """
+  RUBYGEMS
+
+  """
+  Rust crates
+  """
+  RUST
+
+  """
+  Swift packages
+  """
+  SWIFT
+}
+
+"""
+Dependency manifest for a repository
+"""
+type DependencyGraphManifest implements Node @preview(toggledBy: "hawkgirl-preview") {
+  """
+  Path to view the manifest file blob
+  """
+  blobPath: String!
+
+  """
+  A list of manifest dependencies
+  """
+  dependencies(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): DependencyGraphDependencyConnection
+
+  """
+  The number of dependencies listed in the manifest
+  """
+  dependenciesCount: Int
+
+  """
+  Is the manifest too big to parse?
+  """
+  exceedsMaxSize: Boolean!
+
+  """
+  Fully qualified manifest filename
+  """
+  filename: String!
+
+  """
+  The Node ID of the DependencyGraphManifest object
+  """
+  id: ID!
+
+  """
+  Were we able to parse the manifest?
+  """
+  parseable: Boolean!
+
+  """
+  The repository containing the manifest
+  """
+  repository: Repository!
+}
+
+"""
+The connection type for DependencyGraphManifest.
+"""
+type DependencyGraphManifestConnection @preview(toggledBy: "hawkgirl-preview") {
+  """
+  A list of edges.
+  """
+  edges: [DependencyGraphManifestEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [DependencyGraphManifest]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type DependencyGraphManifestEdge @preview(toggledBy: "hawkgirl-preview") {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: DependencyGraphManifest
+}
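+
+# The `@preview(toggledBy: "hawkgirl-preview")` members above are only served
+# to clients that opt in to that schema preview (for github.com this has meant
+# sending an Accept header such as
+# `application/vnd.github.hawkgirl-preview+json`). Usage sketch (comments
+# only), assuming the `dependencyGraphManifests` connection on Repository
+# defined elsewhere in this schema:
+#
+#   query {
+#     repository(owner: "octocat", name: "hello-world") {
+#       dependencyGraphManifests(first: 10) {
+#         nodes {
+#           filename
+#           dependencies(first: 20) {
+#             nodes {
+#               packageName
+#               requirements
+#             }
+#           }
+#         }
+#       }
+#     }
+#   }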
+
+"""
+A repository deploy key.
+"""
+type DeployKey implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the DeployKey object
+  """
+  id: ID!
+
+  """
+  The deploy key.
+  """
+  key: String!
+
+  """
+  Whether or not the deploy key is read only.
+  """
+  readOnly: Boolean!
+
+  """
+  The deploy key title.
+  """
+  title: String!
+
+  """
+  Whether or not the deploy key has been verified.
+  """
+  verified: Boolean!
+}
+
+"""
+The connection type for DeployKey.
+"""
+type DeployKeyConnection {
+  """
+  A list of edges.
+  """
+  edges: [DeployKeyEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [DeployKey]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type DeployKeyEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: DeployKey
+}
+
+"""
+Represents a 'deployed' event on a given pull request.
+"""
+type DeployedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The deployment associated with the 'deployed' event.
+  """
+  deployment: Deployment!
+
+  """
+  The Node ID of the DeployedEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+
+  """
+  The ref associated with the 'deployed' event.
+  """
+  ref: Ref
+}
+
+"""
+Represents a triggered deployment instance.
+"""
+type Deployment implements Node {
+  """
+  Identifies the commit sha of the deployment.
+  """
+  commit: Commit
+
+  """
+  Identifies the oid of the deployment commit, even if the commit has been deleted.
+  """
+  commitOid: String!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the actor who triggered the deployment.
+  """
+  creator: Actor!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The deployment description.
+  """
+  description: String
+
+  """
+  The latest environment to which this deployment was made.
+  """
+  environment: String
+
+  """
+  The Node ID of the Deployment object
+  """
+  id: ID!
+
+  """
+  The latest environment to which this deployment was made.
+  """
+  latestEnvironment: String
+
+  """
+  The latest status of this deployment.
+  """
+  latestStatus: DeploymentStatus
+
+  """
+  The original environment to which this deployment was made.
+  """
+  originalEnvironment: String
+
+  """
+  Extra information that a deployment system might need.
+  """
+  payload: String
+
+  """
+  Identifies the Ref of the deployment, if the deployment was created by ref.
+  """
+  ref: Ref
+
+  """
+  Identifies the repository associated with the deployment.
+  """
+  repository: Repository!
+
+  """
+  The current state of the deployment.
+  """
+  state: DeploymentState
+
+  """
+  A list of statuses associated with the deployment.
+  """
+  statuses(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): DeploymentStatusConnection
+
+  """
+  The deployment task.
+  """
+  task: String
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
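+
+# Usage sketch (comments only): deployments are typically reached through a
+# repository's `deployments` connection (defined elsewhere in this schema),
+# ordered with the DeploymentOrder input defined below:
+#
+#   query {
+#     repository(owner: "octocat", name: "hello-world") {
+#       deployments(first: 5, orderBy: {field: CREATED_AT, direction: DESC}) {
+#         nodes {
+#           environment
+#           state
+#           statuses(first: 1) {
+#             nodes {
+#               state
+#               logUrl
+#             }
+#           }
+#         }
+#       }
+#     }
+#   }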
+
+"""
+The connection type for Deployment.
+"""
+type DeploymentConnection {
+  """
+  A list of edges.
+  """
+  edges: [DeploymentEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Deployment]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type DeploymentEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Deployment
+}
+
+"""
+Represents a 'deployment_environment_changed' event on a given pull request.
+"""
+type DeploymentEnvironmentChangedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The deployment status that updated the deployment environment.
+  """
+  deploymentStatus: DeploymentStatus!
+
+  """
+  The Node ID of the DeploymentEnvironmentChangedEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+}
+
+"""
+Ordering options for deployment connections
+"""
+input DeploymentOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order deployments by.
+  """
+  field: DeploymentOrderField!
+}
+
+"""
+Properties by which deployment connections can be ordered.
+"""
+enum DeploymentOrderField {
+  """
+  Order collection by creation time
+  """
+  CREATED_AT
+}
+
+"""
+A protection rule.
+"""
+type DeploymentProtectionRule {
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  Whether the user who created a deployment is prevented from approving
+  deployments to this environment.
+  """
+  preventSelfReview: Boolean
+
+  """
+  The teams or users that can review the deployment
+  """
+  reviewers(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): DeploymentReviewerConnection!
+
+  """
+  The timeout in minutes for this protection rule.
+  """
+  timeout: Int!
+
+  """
+  The type of protection rule.
+  """
+  type: DeploymentProtectionRuleType!
+}
+
+"""
+The connection type for DeploymentProtectionRule.
+"""
+type DeploymentProtectionRuleConnection {
+  """
+  A list of edges.
+  """
+  edges: [DeploymentProtectionRuleEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [DeploymentProtectionRule]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type DeploymentProtectionRuleEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: DeploymentProtectionRule
+}
+
+"""
+The possible protection rule types.
+"""
+enum DeploymentProtectionRuleType {
+  """
+  Required reviewers
+  """
+  REQUIRED_REVIEWERS
+
+  """
+  Wait timer
+  """
+  WAIT_TIMER
+}
+
+"""
+A request to deploy a workflow run to an environment.
+"""
+type DeploymentRequest {
+  """
+  Whether or not the current user can approve the deployment
+  """
+  currentUserCanApprove: Boolean!
+
+  """
+  The target environment of the deployment
+  """
+  environment: Environment!
+
+  """
+  The teams or users that can review the deployment
+  """
+  reviewers(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): DeploymentReviewerConnection!
+
+  """
+  The wait timer in minutes configured in the environment
+  """
+  waitTimer: Int!
+
+  """
+  The time at which the wait timer began.
+  """
+  waitTimerStartedAt: DateTime
+}
+
+"""
+The connection type for DeploymentRequest.
+"""
+type DeploymentRequestConnection {
+  """
+  A list of edges.
+  """
+  edges: [DeploymentRequestEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [DeploymentRequest]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type DeploymentRequestEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: DeploymentRequest
+}
+
+"""
+A deployment review.
+"""
+type DeploymentReview implements Node {
+  """
+  The comment the user left.
+  """
+  comment: String!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The environments approved or rejected
+  """
+  environments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): EnvironmentConnection!
+
+  """
+  The Node ID of the DeploymentReview object
+  """
+  id: ID!
+
+  """
+  The decision of the user.
+  """
+  state: DeploymentReviewState!
+
+  """
+  The user that reviewed the deployment.
+  """
+  user: User!
+}
+
+"""
+The connection type for DeploymentReview.
+"""
+type DeploymentReviewConnection {
+  """
+  A list of edges.
+  """
+  edges: [DeploymentReviewEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [DeploymentReview]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type DeploymentReviewEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: DeploymentReview
+}
+
+"""
+The possible states for a deployment review.
+"""
+enum DeploymentReviewState {
+  """
+  The deployment was approved.
+  """
+  APPROVED
+
+  """
+  The deployment was rejected.
+  """
+  REJECTED
+}
+
+"""
+Users and teams.
+"""
+union DeploymentReviewer = Team | User
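+
+# Because `DeploymentReviewer` is a union, selections on it need inline
+# fragments, e.g. inside the `reviewers` connections above (usage sketch,
+# comments only):
+#
+#   reviewers(first: 10) {
+#     nodes {
+#       ... on User {
+#         login
+#       }
+#       ... on Team {
+#         name
+#       }
+#     }
+#   }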
+
+"""
+The connection type for DeploymentReviewer.
+"""
+type DeploymentReviewerConnection {
+  """
+  A list of edges.
+  """
+  edges: [DeploymentReviewerEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [DeploymentReviewer]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type DeploymentReviewerEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: DeploymentReviewer
+}
+
+"""
+The possible states in which a deployment can be.
+"""
+enum DeploymentState {
+  """
+  The pending deployment was not updated after 30 minutes.
+  """
+  ABANDONED
+
+  """
+  The deployment is currently active.
+  """
+  ACTIVE
+
+  """
+  An inactive transient deployment.
+  """
+  DESTROYED
+
+  """
+  The deployment experienced an error.
+  """
+  ERROR
+
+  """
+  The deployment has failed.
+  """
+  FAILURE
+
+  """
+  The deployment is inactive.
+  """
+  INACTIVE
+
+  """
+  The deployment is in progress.
+  """
+  IN_PROGRESS
+
+  """
+  The deployment is pending.
+  """
+  PENDING
+
+  """
+  The deployment is queued.
+  """
+  QUEUED
+
+  """
+  The deployment was successful.
+  """
+  SUCCESS
+
+  """
+  The deployment is waiting.
+  """
+  WAITING
+}
+
+"""
+Describes the status of a given deployment attempt.
+"""
+type DeploymentStatus implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the actor who triggered the deployment.
+  """
+  creator: Actor!
+
+  """
+  Identifies the deployment associated with status.
+  """
+  deployment: Deployment!
+
+  """
+  Identifies the description of the deployment.
+  """
+  description: String
+
+  """
+  Identifies the environment of the deployment at the time of this deployment status
+  """
+  environment: String @preview(toggledBy: "flash-preview")
+
+  """
+  Identifies the environment URL of the deployment.
+  """
+  environmentUrl: URI
+
+  """
+  The Node ID of the DeploymentStatus object
+  """
+  id: ID!
+
+  """
+  Identifies the log URL of the deployment.
+  """
+  logUrl: URI
+
+  """
+  Identifies the current state of the deployment.
+  """
+  state: DeploymentStatusState!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The connection type for DeploymentStatus.
+"""
+type DeploymentStatusConnection {
+  """
+  A list of edges.
+  """
+  edges: [DeploymentStatusEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [DeploymentStatus]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type DeploymentStatusEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: DeploymentStatus
+}
+
+"""
+The possible states for a deployment status.
+"""
+enum DeploymentStatusState {
+  """
+  The deployment experienced an error.
+  """
+  ERROR
+
+  """
+  The deployment has failed.
+  """
+  FAILURE
+
+  """
+  The deployment is inactive.
+  """
+  INACTIVE
+
+  """
+  The deployment is in progress.
+  """
+  IN_PROGRESS
+
+  """
+  The deployment is pending.
+  """
+  PENDING
+
+  """
+  The deployment is queued.
+  """
+  QUEUED
+
+  """
+  The deployment was successful.
+  """
+  SUCCESS
+
+  """
+  The deployment is waiting.
+  """
+  WAITING
+}
+
+"""
+Autogenerated input type of DequeuePullRequest
+"""
+input DequeuePullRequestInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the pull request to be dequeued.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+}
+
+"""
+Autogenerated return type of DequeuePullRequest
+"""
+type DequeuePullRequestPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The merge queue entry of the dequeued pull request.
+  """
+  mergeQueueEntry: MergeQueueEntry
+}
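+
+# Usage sketch (comments only), assuming a `dequeuePullRequest` mutation
+# field; the pull request ID is a hypothetical placeholder:
+#
+#   mutation {
+#     dequeuePullRequest(input: {id: "PR_example"}) {
+#       mergeQueueEntry {
+#         id
+#       }
+#     }
+#   }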
+
+"""
+The possible sides of a diff.
+"""
+enum DiffSide {
+  """
+  The left side of the diff.
+  """
+  LEFT
+
+  """
+  The right side of the diff.
+  """
+  RIGHT
+}
+
+"""
+Autogenerated input type of DisablePullRequestAutoMerge
+"""
+input DisablePullRequestAutoMergeInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the pull request to disable auto merge on.
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+}
+
+"""
+Autogenerated return type of DisablePullRequestAutoMerge
+"""
+type DisablePullRequestAutoMergePayload {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The pull request auto merge was disabled on.
+  """
+  pullRequest: PullRequest
+}
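+
+# Usage sketch (comments only), assuming a `disablePullRequestAutoMerge`
+# mutation field; the pull request ID is a hypothetical placeholder:
+#
+#   mutation {
+#     disablePullRequestAutoMerge(input: {pullRequestId: "PR_example"}) {
+#       pullRequest {
+#         number
+#       }
+#     }
+#   }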
+
+"""
+Represents a 'disconnected' event on a given issue or pull request.
+"""
+type DisconnectedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the DisconnectedEvent object
+  """
+  id: ID!
+
+  """
+  Reference originated in a different repository.
+  """
+  isCrossRepository: Boolean!
+
+  """
+  Issue or pull request from which the issue was disconnected.
+  """
+  source: ReferencedSubject!
+
+  """
+  Issue or pull request which was disconnected.
+  """
+  subject: ReferencedSubject!
+}
+
+"""
+A discussion in a repository.
+"""
+type Discussion implements Closable & Comment & Deletable & Labelable & Lockable & Node & Reactable & RepositoryNode & Subscribable & Updatable & Votable {
+  """
+  Reason that the conversation was locked.
+  """
+  activeLockReason: LockReason
+
+  """
+  The comment chosen as this discussion's answer, if any.
+  """
+  answer: DiscussionComment
+
+  """
+  The time when a user chose this discussion's answer, if answered.
+  """
+  answerChosenAt: DateTime
+
+  """
+  The user who chose this discussion's answer, if answered.
+  """
+  answerChosenBy: Actor
+
+  """
+  The actor who authored the comment.
+  """
+  author: Actor
+
+  """
+  Author's association with the subject of the comment.
+  """
+  authorAssociation: CommentAuthorAssociation!
+
+  """
+  The main text of the discussion post.
+  """
+  body: String!
+
+  """
+  The body rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  The body rendered to text.
+  """
+  bodyText: String!
+
+  """
+  The category for this discussion.
+  """
+  category: DiscussionCategory!
+
+  """
+  Indicates if the object is closed (definition of closed may depend on type).
+  """
+  closed: Boolean!
+
+  """
+  Identifies the date and time when the object was closed.
+  """
+  closedAt: DateTime
+
+  """
+  The replies to the discussion.
+  """
+  comments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): DiscussionCommentConnection!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Check if this comment was created via an email reply.
+  """
+  createdViaEmail: Boolean!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The actor who edited the comment.
+  """
+  editor: Actor
+
+  """
+  The Node ID of the Discussion object.
+  """
+  id: ID!
+
+  """
+  Check if this comment was edited and includes an edit with the creation data.
+  """
+  includesCreatedEdit: Boolean!
+
+  """
+  Whether the discussion has been answered.
+  """
+  isAnswered: Boolean
+
+  """
+  A list of labels associated with the object.
+  """
+  labels(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for labels returned from the connection.
+    """
+    orderBy: LabelOrder = {field: CREATED_AT, direction: ASC}
+  ): LabelConnection
+
+  """
+  The moment the editor made the last edit.
+  """
+  lastEditedAt: DateTime
+
+  """
+  `true` if the object is locked.
+  """
+  locked: Boolean!
+
+  """
+  The number identifying this discussion within the repository.
+  """
+  number: Int!
+
+  """
+  The poll associated with this discussion, if one exists.
+  """
+  poll: DiscussionPoll
+
+  """
+  Identifies the date and time when the comment was published.
+  """
+  publishedAt: DateTime
+
+  """
+  A list of reactions grouped by content left on the subject.
+  """
+  reactionGroups: [ReactionGroup!]
+
+  """
+  A list of Reactions left on the Issue.
+  """
+  reactions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Allows filtering Reactions by emoji.
+    """
+    content: ReactionContent
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Allows specifying the order in which reactions are returned.
+    """
+    orderBy: ReactionOrder
+  ): ReactionConnection!
+
+  """
+  The repository associated with this node.
+  """
+  repository: Repository!
+
+  """
+  The path for this discussion.
+  """
+  resourcePath: URI!
+
+  """
+  Identifies the reason for the discussion's state.
+  """
+  stateReason: DiscussionStateReason
+
+  """
+  The title of this discussion.
+  """
+  title: String!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  Number of upvotes that this subject has received.
+  """
+  upvoteCount: Int!
+
+  """
+  The URL for this discussion.
+  """
+  url: URI!
+
+  """
+  A list of edits to this content.
+  """
+  userContentEdits(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserContentEditConnection
+
+  """
+  Indicates if the object can be closed by the viewer.
+  """
+  viewerCanClose: Boolean!
+
+  """
+  Check if the current viewer can delete this object.
+  """
+  viewerCanDelete: Boolean!
+
+  """
+  Can the user react to this subject?
+  """
+  viewerCanReact: Boolean!
+
+  """
+  Indicates if the object can be reopened by the viewer.
+  """
+  viewerCanReopen: Boolean!
+
+  """
+  Check if the viewer is able to change their subscription status for the repository.
+  """
+  viewerCanSubscribe: Boolean!
+
+  """
+  Check if the current viewer can update this object.
+  """
+  viewerCanUpdate: Boolean!
+
+  """
+  Whether or not the current user can add or remove an upvote on this subject.
+  """
+  viewerCanUpvote: Boolean!
+
+  """
+  Did the viewer author this comment?
+  """
+  viewerDidAuthor: Boolean!
+
+  """
+  Whether or not the current user has already upvoted this subject.
+  """
+  viewerHasUpvoted: Boolean!
+
+  """
+  Identifies if the viewer is watching, not watching, or ignoring the subscribable entity.
+  """
+  viewerSubscription: SubscriptionState
+}
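+
+# Illustrative usage, not part of the schema: cursor-based pagination over a
+# discussion's comments using the first/after arguments defined above. The
+# root `repository` field, its `discussion(number:)` field, the PageInfo
+# selections, and all literal values are assumptions.
+#
+#   query {
+#     repository(owner: "octocat", name: "hello-world") {
+#       discussion(number: 1) {
+#         title
+#         comments(first: 10) {
+#           totalCount
+#           pageInfo { hasNextPage endCursor }
+#           nodes { body upvoteCount }
+#         }
+#       }
+#     }
+#   }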
+
+"""
+A category for discussions in a repository.
+"""
+type DiscussionCategory implements Node & RepositoryNode {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  A description of this category.
+  """
+  description: String
+
+  """
+  An emoji representing this category.
+  """
+  emoji: String!
+
+  """
+  This category's emoji rendered as HTML.
+  """
+  emojiHTML: HTML!
+
+  """
+  The Node ID of the DiscussionCategory object.
+  """
+  id: ID!
+
+  """
+  Whether or not discussions in this category support choosing an answer with the markDiscussionCommentAsAnswer mutation.
+  """
+  isAnswerable: Boolean!
+
+  """
+  The name of this category.
+  """
+  name: String!
+
+  """
+  The repository associated with this node.
+  """
+  repository: Repository!
+
+  """
+  The slug of this category.
+  """
+  slug: String!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The connection type for DiscussionCategory.
+"""
+type DiscussionCategoryConnection {
+  """
+  A list of edges.
+  """
+  edges: [DiscussionCategoryEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [DiscussionCategory]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type DiscussionCategoryEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: DiscussionCategory
+}
+
+"""
+The possible reasons for closing a discussion.
+"""
+enum DiscussionCloseReason {
+  """
+  The discussion is a duplicate of another.
+  """
+  DUPLICATE
+
+  """
+  The discussion is no longer relevant.
+  """
+  OUTDATED
+
+  """
+  The discussion has been resolved.
+  """
+  RESOLVED
+}
+
+"""
+A comment on a discussion.
+"""
+type DiscussionComment implements Comment & Deletable & Minimizable & Node & Reactable & Updatable & UpdatableComment & Votable {
+  """
+  The actor who authored the comment.
+  """
+  author: Actor
+
+  """
+  Author's association with the subject of the comment.
+  """
+  authorAssociation: CommentAuthorAssociation!
+
+  """
+  The body as Markdown.
+  """
+  body: String!
+
+  """
+  The body rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  The body rendered to text.
+  """
+  bodyText: String!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Check if this comment was created via an email reply.
+  """
+  createdViaEmail: Boolean!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The time when this replied-to comment was deleted.
+  """
+  deletedAt: DateTime
+
+  """
+  The discussion this comment was created in.
+  """
+  discussion: Discussion
+
+  """
+  The actor who edited the comment.
+  """
+  editor: Actor
+
+  """
+  The Node ID of the DiscussionComment object.
+  """
+  id: ID!
+
+  """
+  Check if this comment was edited and includes an edit with the creation data.
+  """
+  includesCreatedEdit: Boolean!
+
+  """
+  Has this comment been chosen as the answer of its discussion?
+  """
+  isAnswer: Boolean!
+
+  """
+  Returns whether or not a comment has been minimized.
+  """
+  isMinimized: Boolean!
+
+  """
+  The moment the editor made the last edit.
+  """
+  lastEditedAt: DateTime
+
+  """
+  Returns why the comment was minimized. One of `abuse`, `off-topic`,
+  `outdated`, `resolved`, `duplicate`, and `spam`. Note that the case and
+  formatting of these values differ from the inputs to the `MinimizeComment` mutation.
+  """
+  minimizedReason: String
+
+  """
+  Identifies the date and time when the comment was published.
+  """
+  publishedAt: DateTime
+
+  """
+  A list of reactions grouped by content left on the subject.
+  """
+  reactionGroups: [ReactionGroup!]
+
+  """
+  A list of Reactions left on the Issue.
+  """
+  reactions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Allows filtering Reactions by emoji.
+    """
+    content: ReactionContent
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Allows specifying the order in which reactions are returned.
+    """
+    orderBy: ReactionOrder
+  ): ReactionConnection!
+
+  """
+  The threaded replies to this comment.
+  """
+  replies(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): DiscussionCommentConnection!
+
+  """
+  The discussion comment this comment is a reply to.
+  """
+  replyTo: DiscussionComment
+
+  """
+  The path for this discussion comment.
+  """
+  resourcePath: URI!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  Number of upvotes that this subject has received.
+  """
+  upvoteCount: Int!
+
+  """
+  The URL for this discussion comment.
+  """
+  url: URI!
+
+  """
+  A list of edits to this content.
+  """
+  userContentEdits(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserContentEditConnection
+
+  """
+  Check if the current viewer can delete this object.
+  """
+  viewerCanDelete: Boolean!
+
+  """
+  Can the current user mark this comment as an answer?
+  """
+  viewerCanMarkAsAnswer: Boolean!
+
+  """
+  Check if the current viewer can minimize this object.
+  """
+  viewerCanMinimize: Boolean!
+
+  """
+  Can the user react to this subject?
+  """
+  viewerCanReact: Boolean!
+
+  """
+  Can the current user unmark this comment as an answer?
+  """
+  viewerCanUnmarkAsAnswer: Boolean!
+
+  """
+  Check if the current viewer can update this object.
+  """
+  viewerCanUpdate: Boolean!
+
+  """
+  Whether or not the current user can add or remove an upvote on this subject.
+  """
+  viewerCanUpvote: Boolean!
+
+  """
+  Reasons why the current viewer can not update this comment.
+  """
+  viewerCannotUpdateReasons: [CommentCannotUpdateReason!]!
+
+  """
+  Did the viewer author this comment?
+  """
+  viewerDidAuthor: Boolean!
+
+  """
+  Whether or not the current user has already upvoted this subject.
+  """
+  viewerHasUpvoted: Boolean!
+}
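+
+# Illustrative usage, not part of the schema: inspecting a comment's threaded
+# replies and answer status via the fields defined above. The `node` root
+# field and the ID literal are assumptions.
+#
+#   query {
+#     node(id: "DC_hypotheticalId") {
+#       ... on DiscussionComment {
+#         isAnswer
+#         viewerCanMarkAsAnswer
+#         replies(first: 5) {
+#           nodes { body replyTo { id } }
+#         }
+#       }
+#     }
+#   }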
+
+"""
+The connection type for DiscussionComment.
+"""
+type DiscussionCommentConnection {
+  """
+  A list of edges.
+  """
+  edges: [DiscussionCommentEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [DiscussionComment]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type DiscussionCommentEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: DiscussionComment
+}
+
+"""
+The connection type for Discussion.
+"""
+type DiscussionConnection {
+  """
+  A list of edges.
+  """
+  edges: [DiscussionEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Discussion]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type DiscussionEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Discussion
+}
+
+"""
+Ways in which lists of discussions can be ordered upon return.
+"""
+input DiscussionOrder {
+  """
+  The direction in which to order discussions by the specified field.
+  """
+  direction: OrderDirection!
+
+  """
+  The field by which to order discussions.
+  """
+  field: DiscussionOrderField!
+}
+
+"""
+Properties by which discussion connections can be ordered.
+"""
+enum DiscussionOrderField {
+  """
+  Order discussions by creation time.
+  """
+  CREATED_AT
+
+  """
+  Order discussions by most recent modification time.
+  """
+  UPDATED_AT
+}
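+
+# Illustrative usage, not part of the schema: passing a DiscussionOrder value
+# to a connection that accepts one. The root `repository` field, its
+# `discussions(orderBy:)` argument, and the OrderDirection value DESC are
+# assumptions; the input and enum themselves are defined above.
+#
+#   query {
+#     repository(owner: "octocat", name: "hello-world") {
+#       discussions(first: 10, orderBy: {field: UPDATED_AT, direction: DESC}) {
+#         nodes { title updatedAt }
+#       }
+#     }
+#   }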
+
+"""
+A poll for a discussion.
+"""
+type DiscussionPoll implements Node {
+  """
+  The discussion that this poll belongs to.
+  """
+  discussion: Discussion
+
+  """
+  The Node ID of the DiscussionPoll object.
+  """
+  id: ID!
+
+  """
+  The options for this poll.
+  """
+  options(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    How to order the options for the discussion poll.
+    """
+    orderBy: DiscussionPollOptionOrder = {field: AUTHORED_ORDER, direction: ASC}
+  ): DiscussionPollOptionConnection
+
+  """
+  The question that is being asked by this poll.
+  """
+  question: String!
+
+  """
+  The total number of votes that have been cast for this poll.
+  """
+  totalVoteCount: Int!
+
+  """
+  Indicates if the viewer has permission to vote in this poll.
+  """
+  viewerCanVote: Boolean!
+
+  """
+  Indicates if the viewer has voted for any option in this poll.
+  """
+  viewerHasVoted: Boolean!
+}
+
+"""
+An option for a discussion poll.
+"""
+type DiscussionPollOption implements Node {
+  """
+  The Node ID of the DiscussionPollOption object.
+  """
+  id: ID!
+
+  """
+  The text for this option.
+  """
+  option: String!
+
+  """
+  The discussion poll that this option belongs to.
+  """
+  poll: DiscussionPoll
+
+  """
+  The total number of votes that have been cast for this option.
+  """
+  totalVoteCount: Int!
+
+  """
+  Indicates if the viewer has voted for this option in the poll.
+  """
+  viewerHasVoted: Boolean!
+}
+
+"""
+The connection type for DiscussionPollOption.
+"""
+type DiscussionPollOptionConnection {
+  """
+  A list of edges.
+  """
+  edges: [DiscussionPollOptionEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [DiscussionPollOption]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type DiscussionPollOptionEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: DiscussionPollOption
+}
+
+"""
+Ordering options for discussion poll option connections.
+"""
+input DiscussionPollOptionOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order poll options by.
+  """
+  field: DiscussionPollOptionOrderField!
+}
+
+"""
+Properties by which discussion poll option connections can be ordered.
+"""
+enum DiscussionPollOptionOrderField {
+  """
+  Order poll options by the order that the poll author specified when creating the poll.
+  """
+  AUTHORED_ORDER
+
+  """
+  Order poll options by the number of votes each has.
+  """
+  VOTE_COUNT
+}
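+
+# Illustrative usage, not part of the schema: ordering poll options by vote
+# count with DiscussionPollOptionOrder. The `node` root field, the ID
+# literal, and the OrderDirection value DESC are assumptions; DiscussionPoll
+# and its `options(orderBy:)` argument are defined above.
+#
+#   query {
+#     node(id: "DP_hypotheticalId") {
+#       ... on DiscussionPoll {
+#         question
+#         totalVoteCount
+#         options(first: 5, orderBy: {field: VOTE_COUNT, direction: DESC}) {
+#           nodes { option totalVoteCount }
+#         }
+#       }
+#     }
+#   }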
+
+"""
+The possible states of a discussion.
+"""
+enum DiscussionState {
+  """
+  A discussion that has been closed.
+  """
+  CLOSED
+
+  """
+  A discussion that is open.
+  """
+  OPEN
+}
+
+"""
+The possible state reasons of a discussion.
+"""
+enum DiscussionStateReason {
+  """
+  The discussion is a duplicate of another.
+  """
+  DUPLICATE
+
+  """
+  The discussion is no longer relevant.
+  """
+  OUTDATED
+
+  """
+  The discussion was reopened.
+  """
+  REOPENED
+
+  """
+  The discussion has been resolved.
+  """
+  RESOLVED
+}
+
+"""
+Autogenerated input type of DismissPullRequestReview
+"""
+input DismissPullRequestReviewInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The contents of the pull request review dismissal message.
+  """
+  message: String!
+
+  """
+  The Node ID of the pull request review to modify.
+  """
+  pullRequestReviewId: ID! @possibleTypes(concreteTypes: ["PullRequestReview"])
+}
+
+"""
+Autogenerated return type of DismissPullRequestReview
+"""
+type DismissPullRequestReviewPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The dismissed pull request review.
+  """
+  pullRequestReview: PullRequestReview
+}
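+
+# Illustrative usage, not part of the schema: dismissing a review requires a
+# message. The root field name `dismissPullRequestReview` and the ID literal
+# are assumptions; the input and payload shapes are defined above.
+#
+#   mutation {
+#     dismissPullRequestReview(input: {
+#       pullRequestReviewId: "PRR_hypotheticalId"
+#       message: "Superseded by a newer review."
+#     }) {
+#       clientMutationId
+#     }
+#   }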
+
+"""
+The possible reasons that a Dependabot alert was dismissed.
+"""
+enum DismissReason {
+  """
+  A fix has already been started.
+  """
+  FIX_STARTED
+
+  """
+  This alert is inaccurate or incorrect.
+  """
+  INACCURATE
+
+  """
+  Vulnerable code is not actually used.
+  """
+  NOT_USED
+
+  """
+  No bandwidth to fix this.
+  """
+  NO_BANDWIDTH
+
+  """
+  Risk is tolerable to this project.
+  """
+  TOLERABLE_RISK
+}
+
+"""
+Autogenerated input type of DismissRepositoryVulnerabilityAlert
+"""
+input DismissRepositoryVulnerabilityAlertInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The reason the Dependabot alert is being dismissed.
+  """
+  dismissReason: DismissReason!
+
+  """
+  The Dependabot alert ID to dismiss.
+  """
+  repositoryVulnerabilityAlertId: ID! @possibleTypes(concreteTypes: ["RepositoryVulnerabilityAlert"])
+}
+
+"""
+Autogenerated return type of DismissRepositoryVulnerabilityAlert
+"""
+type DismissRepositoryVulnerabilityAlertPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Dependabot alert that was dismissed
+  """
+  repositoryVulnerabilityAlert: RepositoryVulnerabilityAlert
+}
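+
+# Illustrative usage, not part of the schema: `dismissReason` takes a bare
+# DismissReason enum value, not a quoted string. The root field name and the
+# ID literal are assumptions; the input and payload shapes are defined above.
+#
+#   mutation {
+#     dismissRepositoryVulnerabilityAlert(input: {
+#       repositoryVulnerabilityAlertId: "RVA_hypotheticalId"
+#       dismissReason: NOT_USED
+#     }) {
+#       clientMutationId
+#     }
+#   }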
+
+"""
+A draft issue within a project.
+"""
+type DraftIssue implements Node {
+  """
+  A list of users assigned to this draft issue.
+  """
+  assignees(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserConnection!
+
+  """
+  The body of the draft issue.
+  """
+  body: String!
+
+  """
+  The body of the draft issue rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  The body of the draft issue rendered to text.
+  """
+  bodyText: String!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The actor who created this draft issue.
+  """
+  creator: Actor
+
+  """
+  The Node ID of the DraftIssue object.
+  """
+  id: ID!
+
+  """
+  List of items linked with the draft issue (currently a draft issue can be linked to only one item).
+  """
+  projectV2Items(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectV2ItemConnection!
+
+  """
+  Projects that link to this draft issue (currently a draft issue can be linked to only one project).
+  """
+  projectsV2(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectV2Connection!
+
+  """
+  The title of the draft issue.
+  """
+  title: String!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+Specifies a review comment to be left with a Pull Request Review.
+"""
+input DraftPullRequestReviewComment {
+  """
+  Body of the comment to leave.
+  """
+  body: String!
+
+  """
+  Path to the file being commented on.
+  """
+  path: String!
+
+  """
+  Position in the file to leave a comment on.
+  """
+  position: Int!
+}
+
+"""
+Specifies a review comment thread to be left with a Pull Request Review.
+"""
+input DraftPullRequestReviewThread {
+  """
+  Body of the comment to leave.
+  """
+  body: String!
+
+  """
+  The line of the blob to which the thread refers. The end of the line range for multi-line comments.
+  """
+  line: Int!
+
+  """
+  Path to the file being commented on.
+  """
+  path: String!
+
+  """
+  The side of the diff on which the line resides. For multi-line comments, this is the side for the end of the line range.
+  """
+  side: DiffSide = RIGHT
+
+  """
+  The first line of the range to which the comment refers.
+  """
+  startLine: Int
+
+  """
+  The side of the diff on which the start line resides.
+  """
+  startSide: DiffSide = RIGHT
+}
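+
+# Illustrative value, not part of the schema: a multi-line
+# DraftPullRequestReviewThread. `startLine` opens the range and `line` closes
+# it; `side` and `startSide` default to RIGHT, so they can be omitted for
+# comments on the new version of the file. The path and body are placeholders.
+#
+#   {
+#     path: "src/example.py"
+#     startLine: 10
+#     line: 14
+#     body: "Consider extracting this block into a helper."
+#   }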
+
+"""
+Autogenerated input type of EnablePullRequestAutoMerge
+"""
+input EnablePullRequestAutoMergeInput {
+  """
+  The email address to associate with this merge.
+  """
+  authorEmail: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Commit body to use for the commit when the PR is mergeable; if omitted, a
+  default message will be used. NOTE: when merging with a merge queue, any
+  input value for commit body is ignored.
+  """
+  commitBody: String
+
+  """
+  Commit headline to use for the commit when the PR is mergeable; if omitted,
+  a default message will be used. NOTE: when merging with a merge queue, any
+  input value for commit headline is ignored.
+  """
+  commitHeadline: String
+
+  """
+  The expected head OID of the pull request.
+  """
+  expectedHeadOid: GitObjectID
+
+  """
+  The merge method to use. If omitted, defaults to `MERGE`. NOTE: when
+  merging with a merge queue, any input value for merge method is ignored.
+  """
+  mergeMethod: PullRequestMergeMethod = MERGE
+
+  """
+  ID of the pull request to enable auto-merge on.
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+}
+
+"""
+Autogenerated return type of EnablePullRequestAutoMerge
+"""
+type EnablePullRequestAutoMergePayload {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The pull request auto-merge was enabled on.
+  """
+  pullRequest: PullRequest
+}
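+
+# Illustrative usage, not part of the schema: enabling auto-merge with an
+# explicit merge method (MERGE is the default when omitted). The root field
+# name, the ID literal, and the PullRequestMergeMethod value SQUASH are
+# assumptions; the input and payload shapes are defined above.
+#
+#   mutation {
+#     enablePullRequestAutoMerge(input: {
+#       pullRequestId: "PR_hypotheticalId"
+#       mergeMethod: SQUASH
+#     }) {
+#       clientMutationId
+#     }
+#   }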
+
+"""
+Autogenerated input type of EnqueuePullRequest
+"""
+input EnqueuePullRequestInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The expected head OID of the pull request.
+  """
+  expectedHeadOid: GitObjectID
+
+  """
+  Add the pull request to the front of the queue.
+  """
+  jump: Boolean
+
+  """
+  The ID of the pull request to enqueue.
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+}
+
+"""
+Autogenerated return type of EnqueuePullRequest
+"""
+type EnqueuePullRequestPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The merge queue entry for the enqueued pull request.
+  """
+  mergeQueueEntry: MergeQueueEntry
+}
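+
+# Illustrative usage, not part of the schema: jumping a pull request to the
+# front of the merge queue, guarded by `expectedHeadOid` so the mutation can
+# fail if the head has moved. The root field name and both literals are
+# assumptions; the input and payload shapes are defined above.
+#
+#   mutation {
+#     enqueuePullRequest(input: {
+#       pullRequestId: "PR_hypotheticalId"
+#       jump: true
+#       expectedHeadOid: "0123456789abcdef0123456789abcdef01234567"
+#     }) {
+#       clientMutationId
+#     }
+#   }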
+
+"""
+An account to manage multiple organizations with consolidated policy and billing.
+"""
+type Enterprise implements AnnouncementBanner & Node {
+  """
+  The text of the announcement.
+  """
+  announcement: String
+
+  """
+  The expiration date of the announcement, if any.
+  """
+  announcementExpiresAt: DateTime
+
+  """
+  Whether the announcement can be dismissed by the user.
+  """
+  announcementUserDismissible: Boolean
+
+  """
+  A URL pointing to the enterprise's public avatar.
+  """
+  avatarUrl(
+    """
+    The size of the resulting square image.
+    """
+    size: Int
+  ): URI!
+
+  """
+  Enterprise billing information visible to enterprise billing managers.
+  """
+  billingInfo: EnterpriseBillingInfo
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The description of the enterprise.
+  """
+  description: String
+
+  """
+  The description of the enterprise as HTML.
+  """
+  descriptionHTML: HTML!
+
+  """
+  The Node ID of the Enterprise object.
+  """
+  id: ID!
+
+  """
+  The location of the enterprise.
+  """
+  location: String
+
+  """
+  A list of users who are members of this enterprise.
+  """
+  members(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Only return members within the selected GitHub Enterprise deployment.
+    """
+    deployment: EnterpriseUserDeployment
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Only return members with this two-factor authentication status. Does not
+    include members who only have an account on a GitHub Enterprise Server instance.
+    """
+    hasTwoFactorEnabled: Boolean = null
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for members returned from the connection.
+    """
+    orderBy: EnterpriseMemberOrder = {field: LOGIN, direction: ASC}
+
+    """
+    Only return members within the organizations with these logins.
+    """
+    organizationLogins: [String!]
+
+    """
+    The search string to look for.
+    """
+    query: String
+
+    """
+    The role of the user in the enterprise organization or server.
+    """
+    role: EnterpriseUserAccountMembershipRole
+  ): EnterpriseMemberConnection!
+
+  """
+  The name of the enterprise.
+  """
+  name: String!
+
+  """
+  A list of organizations that belong to this enterprise.
+  """
+  organizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations returned from the connection.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The search string to look for.
+    """
+    query: String
+
+    """
+    The viewer's role in an organization.
+    """
+    viewerOrganizationRole: RoleInOrganization
+  ): OrganizationConnection!
+
+  """
+  Enterprise information visible to enterprise owners or enterprise owners'
+  personal access tokens (classic) with read:enterprise or admin:enterprise scope.
+  """
+  ownerInfo: EnterpriseOwnerInfo
+
+  """
+  The HTTP path for this enterprise.
+  """
+  resourcePath: URI!
+
+  """
+  The URL-friendly identifier for the enterprise.
+  """
+  slug: String!
+
+  """
+  The HTTP URL for this enterprise.
+  """
+  url: URI!
+
+  """
+  Is the current viewer an admin of this enterprise?
+  """
+  viewerIsAdmin: Boolean!
+
+  """
+  The URL of the enterprise website.
+  """
+  websiteUrl: URI
+}
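+
+# Illustrative usage, not part of the schema: filtering enterprise members
+# with the `members` arguments defined above. The root `enterprise(slug:)`
+# field and all literal values are assumptions.
+#
+#   query {
+#     enterprise(slug: "example-enterprise") {
+#       name
+#       members(first: 25, hasTwoFactorEnabled: false, organizationLogins: ["example-org"]) {
+#         totalCount
+#       }
+#     }
+#   }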
+
+"""
+The connection type for User.
+"""
+type EnterpriseAdministratorConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnterpriseAdministratorEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [User]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+A User who is an administrator of an enterprise.
+"""
+type EnterpriseAdministratorEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: User
+
+  """
+  The role of the administrator.
+  """
+  role: EnterpriseAdministratorRole!
+}
+
+"""
+An invitation for a user to become an owner or billing manager of an enterprise.
+"""
+type EnterpriseAdministratorInvitation implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The email of the person who was invited to the enterprise.
+  """
+  email: String
+
+  """
+  The enterprise the invitation is for.
+  """
+  enterprise: Enterprise!
+
+  """
+  The Node ID of the EnterpriseAdministratorInvitation object.
+  """
+  id: ID!
+
+  """
+  The user who was invited to the enterprise.
+  """
+  invitee: User
+
+  """
+  The user who created the invitation.
+  """
+  inviter: User
+
+  """
+  The invitee's pending role in the enterprise (owner or billing_manager).
+  """
+  role: EnterpriseAdministratorRole!
+}
+
+"""
+The connection type for EnterpriseAdministratorInvitation.
+"""
+type EnterpriseAdministratorInvitationConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnterpriseAdministratorInvitationEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [EnterpriseAdministratorInvitation]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type EnterpriseAdministratorInvitationEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: EnterpriseAdministratorInvitation
+}
+
+"""
+Ordering options for enterprise administrator invitation connections.
+"""
+input EnterpriseAdministratorInvitationOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order enterprise administrator invitations by.
+  """
+  field: EnterpriseAdministratorInvitationOrderField!
+}
+
+"""
+Properties by which enterprise administrator invitation connections can be ordered.
+"""
+enum EnterpriseAdministratorInvitationOrderField {
+  """
+  Order enterprise administrator invitations by creation time.
+  """
+  CREATED_AT
+}
+
+"""
+The possible administrator roles in an enterprise account.
+"""
+enum EnterpriseAdministratorRole {
+  """
+  Represents a billing manager of the enterprise account.
+  """
+  BILLING_MANAGER
+
+  """
+  Represents an owner of the enterprise account.
+  """
+  OWNER
+}
+
+"""
+The possible values for the enterprise allow private repository forking policy value.
+"""
+enum EnterpriseAllowPrivateRepositoryForkingPolicyValue {
+  """
+  Members can fork a repository to an organization within this enterprise.
+  """
+  ENTERPRISE_ORGANIZATIONS
+
+  """
+  Members can fork a repository to their enterprise-managed user account or an organization inside this enterprise.
+  """
+  ENTERPRISE_ORGANIZATIONS_USER_ACCOUNTS
+
+  """
+  Members can fork a repository to their user account or an organization, either inside or outside of this enterprise.
+  """
+  EVERYWHERE
+
+  """
+  Members can fork a repository only within the same organization (intra-org).
+  """
+  SAME_ORGANIZATION
+
+  """
+  Members can fork a repository to their user account or within the same organization.
+  """
+  SAME_ORGANIZATION_USER_ACCOUNTS
+
+  """
+  Members can fork a repository to their user account.
+  """
+  USER_ACCOUNTS
+}
+
+"""
+Metadata for an audit entry containing enterprise account information.
+"""
+interface EnterpriseAuditEntryData {
+  """
+  The HTTP path for this enterprise.
+  """
+  enterpriseResourcePath: URI
+
+  """
+  The slug of the enterprise.
+  """
+  enterpriseSlug: String
+
+  """
+  The HTTP URL for this enterprise.
+  """
+  enterpriseUrl: URI
+}
+
+"""
+Enterprise billing information visible to enterprise billing managers and owners.
+"""
+type EnterpriseBillingInfo {
+  """
+  The number of licensable users/emails across the enterprise.
+  """
+  allLicensableUsersCount: Int!
+
+  """
+  The number of data packs used by all organizations owned by the enterprise.
+  """
+  assetPacks: Int!
+
+  """
+  The bandwidth quota in GB for all organizations owned by the enterprise.
+  """
+  bandwidthQuota: Float!
+
+  """
+  The bandwidth usage in GB for all organizations owned by the enterprise.
+  """
+  bandwidthUsage: Float!
+
+  """
+  The bandwidth usage as a percentage of the bandwidth quota.
+  """
+  bandwidthUsagePercentage: Int!
+
+  """
+  The storage quota in GB for all organizations owned by the enterprise.
+  """
+  storageQuota: Float!
+
+  """
+  The storage usage in GB for all organizations owned by the enterprise.
+  """
+  storageUsage: Float!
+
+  """
+  The storage usage as a percentage of the storage quota.
+  """
+  storageUsagePercentage: Int!
+
+  """
+  The number of available licenses across all owned organizations based on the unique number of billable users.
+  """
+  totalAvailableLicenses: Int!
+
+  """
+  The total number of licenses allocated.
+  """
+  totalLicenses: Int!
+}
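+
+# Illustrative usage, not part of the schema: per the descriptions above, the
+# *Percentage fields are precomputed from usage and quota, so clients need
+# not derive them. The root `enterprise(slug:)` field and the slug value are
+# assumptions.
+#
+#   query {
+#     enterprise(slug: "example-enterprise") {
+#       billingInfo {
+#         storageQuota
+#         storageUsage
+#         storageUsagePercentage
+#       }
+#     }
+#   }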
+
+"""
+The connection type for Enterprise.
+"""
+type EnterpriseConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnterpriseEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Enterprise]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+The possible values for the enterprise base repository permission setting.
+"""
+enum EnterpriseDefaultRepositoryPermissionSettingValue {
+  """
+  Organization members will be able to clone, pull, push, and add new collaborators to all organization repositories.
+  """
+  ADMIN
+
+  """
+  Organization members will only be able to clone and pull public repositories.
+  """
+  NONE
+
+  """
+  Organizations in the enterprise choose base repository permissions for their members.
+  """
+  NO_POLICY
+
+  """
+  Organization members will be able to clone and pull all organization repositories.
+  """
+  READ
+
+  """
+  Organization members will be able to clone, pull, and push all organization repositories.
+  """
+  WRITE
+}
+
+"""
+An edge in a connection.
+"""
+type EnterpriseEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Enterprise
+}
+
+"""
+The possible values for an enabled/disabled enterprise setting.
+"""
+enum EnterpriseEnabledDisabledSettingValue {
+  """
+  The setting is disabled for organizations in the enterprise.
+  """
+  DISABLED
+
+  """
+  The setting is enabled for organizations in the enterprise.
+  """
+  ENABLED
+
+  """
+  There is no policy set for organizations in the enterprise.
+  """
+  NO_POLICY
+}
+
+"""
+The possible values for an enabled/no policy enterprise setting.
+"""
+enum EnterpriseEnabledSettingValue {
+  """
+  The setting is enabled for organizations in the enterprise.
+  """
+  ENABLED
+
+  """
+  There is no policy set for organizations in the enterprise.
+  """
+  NO_POLICY
+}
+
+"""
+The connection type for OrganizationInvitation.
+"""
+type EnterpriseFailedInvitationConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnterpriseFailedInvitationEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [OrganizationInvitation]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+
+  """
+  Identifies the total count of unique users in the connection.
+  """
+  totalUniqueUserCount: Int!
+}
+
+"""
+A failed invitation to be a member in an enterprise organization.
+"""
+type EnterpriseFailedInvitationEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: OrganizationInvitation
+}
+
+"""
+An identity provider configured to provision identities for an enterprise.
+Visible to enterprise owners or enterprise owners' personal access tokens
+(classic) with read:enterprise or admin:enterprise scope.
+"""
+type EnterpriseIdentityProvider implements Node {
+  """
+  The digest algorithm used to sign SAML requests for the identity provider.
+  """
+  digestMethod: SamlDigestAlgorithm
+
+  """
+  The enterprise this identity provider belongs to.
+  """
+  enterprise: Enterprise
+
+  """
+  ExternalIdentities provisioned by this identity provider.
+  """
+  externalIdentities(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter to external identities with the user's login.
+    """
+    login: String
+
+    """
+    Filter to external identities with valid org membership only.
+    """
+    membersOnly: Boolean
+
+    """
+    Filter to external identities with the user's userName/NameID attribute.
+    """
+    userName: String
+  ): ExternalIdentityConnection!
+
+  """
+  The Node ID of the EnterpriseIdentityProvider object.
+  """
+  id: ID!
+
+  """
+  The x509 certificate used by the identity provider to sign assertions and responses.
+  """
+  idpCertificate: X509Certificate
+
+  """
+  The Issuer Entity ID for the SAML identity provider.
+  """
+  issuer: String
+
+  """
+  Recovery codes that can be used by admins to access the enterprise if the identity provider is unavailable.
+  """
+  recoveryCodes: [String!]
+
+  """
+  The signature algorithm used to sign SAML requests for the identity provider.
+  """
+  signatureMethod: SamlSignatureAlgorithm
+
+  """
+  The URL endpoint for the identity provider's SAML SSO.
+  """
+  ssoUrl: URI
+}
+
+"""
+An object that is a member of an enterprise.
+"""
+union EnterpriseMember = EnterpriseUserAccount | User
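+
+# Illustrative usage, not part of the schema: a union exposes no fields of
+# its own, so selections on EnterpriseMember need inline fragments per
+# concrete type. The `login` field on User and EnterpriseUserAccount is an
+# assumption here; both types are defined elsewhere in this document.
+#
+#   fragment memberLogin on EnterpriseMember {
+#     ... on User { login }
+#     ... on EnterpriseUserAccount { login }
+#   }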
+
+"""
+The connection type for EnterpriseMember.
+"""
+type EnterpriseMemberConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnterpriseMemberEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [EnterpriseMember]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+A User who is a member of an enterprise through one or more organizations.
+"""
+type EnterpriseMemberEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: EnterpriseMember
+}
+
+"""
+Ordering options for enterprise member connections.
+"""
+input EnterpriseMemberOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order enterprise members by.
+  """
+  field: EnterpriseMemberOrderField!
+}
+
+"""
+Properties by which enterprise member connections can be ordered.
+"""
+enum EnterpriseMemberOrderField {
+  """
+  Order enterprise members by creation time.
+  """
+  CREATED_AT
+
+  """
+  Order enterprise members by login.
+  """
+  LOGIN
+}
+
+"""
+The possible values for the enterprise members can create repositories setting.
+"""
+enum EnterpriseMembersCanCreateRepositoriesSettingValue {
+  """
+  Members will be able to create public and private repositories.
+  """
+  ALL
+
+  """
+  Members will not be able to create public or private repositories.
+  """
+  DISABLED
+
+  """
+  Organization owners choose whether to allow members to create repositories.
+  """
+  NO_POLICY
+
+  """
+  Members will be able to create only private repositories.
+  """
+  PRIVATE
+
+  """
+  Members will be able to create only public repositories.
+  """
+  PUBLIC
+}
+
+"""
+The possible values for the members can make purchases setting.
+"""
+enum EnterpriseMembersCanMakePurchasesSettingValue {
+  """
+  The setting is disabled for organizations in the enterprise.
+  """
+  DISABLED
+
+  """
+  The setting is enabled for organizations in the enterprise.
+  """
+  ENABLED
+}
+
+"""
+The possible membership types by which a user's enterprises can be filtered.
+"""
+enum EnterpriseMembershipType {
+  """
+  Returns all enterprises in which the user is an admin.
+  """
+  ADMIN
+
+  """
+  Returns all enterprises in which the user is a member, admin, or billing manager.
+  """
+  ALL
+
+  """
+  Returns all enterprises in which the user is a billing manager.
+  """
+  BILLING_MANAGER
+
+  """
+  Returns all enterprises in which the user is a member of an org that is owned by the enterprise.
+  """
+  ORG_MEMBERSHIP
+}
+
+"""
+Ordering options for enterprises.
+"""
+input EnterpriseOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order enterprises by.
+  """
+  field: EnterpriseOrderField!
+}
+
+"""
+Properties by which enterprise connections can be ordered.
+"""
+enum EnterpriseOrderField {
+  """
+  Order enterprises by name
+  """
+  NAME
+}
+
+"""
+The connection type for Organization.
+"""
+type EnterpriseOrganizationMembershipConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnterpriseOrganizationMembershipEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Organization]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An enterprise organization that a user is a member of.
+"""
+type EnterpriseOrganizationMembershipEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Organization
+
+  """
+  The role of the user in the enterprise membership.
+  """
+  role: EnterpriseUserAccountMembershipRole!
+}
+
+"""
+The connection type for User.
+"""
+type EnterpriseOutsideCollaboratorConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnterpriseOutsideCollaboratorEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [User]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+A User who is an outside collaborator of an enterprise through one or more organizations.
+"""
+type EnterpriseOutsideCollaboratorEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: User
+
+  """
+  The enterprise organization repositories this user is a member of.
+  """
+  repositories(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for repositories.
+    """
+    orderBy: RepositoryOrder = {field: NAME, direction: ASC}
+  ): EnterpriseRepositoryInfoConnection!
+}
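+
+# Illustrative usage, not part of the schema: this edge carries its own
+# `repositories` connection, so it must be reached through `edges` rather
+# than the `nodes` shortcut. The `login` field on User and `totalCount` on
+# EnterpriseRepositoryInfoConnection are assumptions following the connection
+# convention above.
+#
+#   fragment collaboratorRepos on EnterpriseOutsideCollaboratorConnection {
+#     edges {
+#       node { login }
+#       repositories(first: 5) { totalCount }
+#     }
+#   }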
+
+"""
+Enterprise information visible to enterprise owners or enterprise owners'
+personal access tokens (classic) with read:enterprise or admin:enterprise scope.
+"""
+type EnterpriseOwnerInfo {
+  """
+  A list of all of the administrators for this enterprise.
+  """
+  admins(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Only return administrators with this two-factor authentication status.
+    """
+    hasTwoFactorEnabled: Boolean = null
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for administrators returned from the connection.
+    """
+    orderBy: EnterpriseMemberOrder = {field: LOGIN, direction: ASC}
+
+    """
+    Only return members within the organizations with these logins.
+    """
+    organizationLogins: [String!]
+
+    """
+    The search string to look for.
+    """
+    query: String
+
+    """
+    The role to filter by.
+    """
+    role: EnterpriseAdministratorRole
+  ): EnterpriseAdministratorConnection!
+
+  """
+  A list of users in the enterprise who currently have two-factor authentication disabled.
+  """
+  affiliatedUsersWithTwoFactorDisabled(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserConnection!
+
+  """
+  Whether or not affiliated users with two-factor authentication disabled exist in the enterprise.
+  """
+  affiliatedUsersWithTwoFactorDisabledExist: Boolean!
+
+  """
+  The setting value for whether private repository forking is enabled for repositories in organizations in this enterprise.
+  """
+  allowPrivateRepositoryForkingSetting: EnterpriseEnabledDisabledSettingValue!
+
+  """
+  A list of enterprise organizations configured with the provided private repository forking setting value.
+  """
+  allowPrivateRepositoryForkingSettingOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations with this setting.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The setting value to find organizations for.
+    """
+    value: Boolean!
+  ): OrganizationConnection!
+
+  """
+  The value for the allow private repository forking policy on the enterprise.
+  """
+  allowPrivateRepositoryForkingSettingPolicyValue: EnterpriseAllowPrivateRepositoryForkingPolicyValue
+
+  """
+  The setting value for base repository permissions for organizations in this enterprise.
+  """
+  defaultRepositoryPermissionSetting: EnterpriseDefaultRepositoryPermissionSettingValue!
+
+  """
+  A list of enterprise organizations configured with the provided base repository permission.
+  """
+  defaultRepositoryPermissionSettingOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations with this setting.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The permission to find organizations for.
+    """
+    value: DefaultRepositoryPermissionField!
+  ): OrganizationConnection!
+
+  """
+  A list of domains owned by the enterprise. Visible to enterprise owners or
+  enterprise owners' personal access tokens (classic) with admin:enterprise scope.
+  """
+  domains(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Filter by whether or not the domain is approved.
+    """
+    isApproved: Boolean = null
+
+    """
+    Filter by whether or not the domain is verified.
+    """
+    isVerified: Boolean = null
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for verifiable domains returned.
+    """
+    orderBy: VerifiableDomainOrder = {field: DOMAIN, direction: ASC}
+  ): VerifiableDomainConnection!
+
+  """
+  Enterprise Server installations owned by the enterprise.
+  """
+  enterpriseServerInstallations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Whether or not to only return installations discovered via GitHub Connect.
+    """
+    connectedOnly: Boolean = false
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for Enterprise Server installations returned.
+    """
+    orderBy: EnterpriseServerInstallationOrder = {field: HOST_NAME, direction: ASC}
+  ): EnterpriseServerInstallationConnection!
+
+  """
+  A list of failed invitations in the enterprise.
+  """
+  failedInvitations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    The search string to look for.
+    """
+    query: String
+  ): EnterpriseFailedInvitationConnection!
+
+  """
+  The setting value for whether the enterprise has an IP allow list enabled.
+  """
+  ipAllowListEnabledSetting: IpAllowListEnabledSettingValue!
+
+  """
+  The IP addresses that are allowed to access resources owned by the enterprise.
+  Visible to enterprise owners or enterprise owners' personal access tokens
+  (classic) with admin:enterprise scope.
+  """
+  ipAllowListEntries(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for IP allow list entries returned.
+    """
+    orderBy: IpAllowListEntryOrder = {field: ALLOW_LIST_VALUE, direction: ASC}
+  ): IpAllowListEntryConnection!
+
+  """
+  The setting value for whether the enterprise has IP allow list configuration for installed GitHub Apps enabled.
+  """
+  ipAllowListForInstalledAppsEnabledSetting: IpAllowListForInstalledAppsEnabledSettingValue!
+
+  """
+  Whether or not the base repository permission is currently being updated.
+  """
+  isUpdatingDefaultRepositoryPermission: Boolean!
+
+  """
+  Whether the two-factor authentication requirement is currently being enforced.
+  """
+  isUpdatingTwoFactorRequirement: Boolean!
+
+  """
+  The setting value for whether organization members with admin permissions on a
+  repository can change repository visibility.
+  """
+  membersCanChangeRepositoryVisibilitySetting: EnterpriseEnabledDisabledSettingValue!
+
+  """
+  A list of enterprise organizations configured with the provided can change repository visibility setting value.
+  """
+  membersCanChangeRepositoryVisibilitySettingOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations with this setting.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The setting value to find organizations for.
+    """
+    value: Boolean!
+  ): OrganizationConnection!
+
+  """
+  The setting value for whether members of organizations in the enterprise can create internal repositories.
+  """
+  membersCanCreateInternalRepositoriesSetting: Boolean
+
+  """
+  The setting value for whether members of organizations in the enterprise can create private repositories.
+  """
+  membersCanCreatePrivateRepositoriesSetting: Boolean
+
+  """
+  The setting value for whether members of organizations in the enterprise can create public repositories.
+  """
+  membersCanCreatePublicRepositoriesSetting: Boolean
+
+  """
+  The setting value for whether members of organizations in the enterprise can create repositories.
+  """
+  membersCanCreateRepositoriesSetting: EnterpriseMembersCanCreateRepositoriesSettingValue
+
+  """
+  A list of enterprise organizations configured with the provided repository creation setting value.
+  """
+  membersCanCreateRepositoriesSettingOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations with this setting.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The setting to find organizations for.
+    """
+    value: OrganizationMembersCanCreateRepositoriesSettingValue!
+  ): OrganizationConnection!
+
+  """
+  The setting value for whether members with admin permissions for repositories can delete issues.
+  """
+  membersCanDeleteIssuesSetting: EnterpriseEnabledDisabledSettingValue!
+
+  """
+  A list of enterprise organizations configured with the provided members can delete issues setting value.
+  """
+  membersCanDeleteIssuesSettingOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations with this setting.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The setting value to find organizations for.
+    """
+    value: Boolean!
+  ): OrganizationConnection!
+
+  """
+  The setting value for whether members with admin permissions for repositories can delete or transfer repositories.
+  """
+  membersCanDeleteRepositoriesSetting: EnterpriseEnabledDisabledSettingValue!
+
+  """
+  A list of enterprise organizations configured with the provided members can delete repositories setting value.
+  """
+  membersCanDeleteRepositoriesSettingOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations with this setting.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The setting value to find organizations for.
+    """
+    value: Boolean!
+  ): OrganizationConnection!
+
+  """
+  The setting value for whether members of organizations in the enterprise can invite outside collaborators.
+  """
+  membersCanInviteCollaboratorsSetting: EnterpriseEnabledDisabledSettingValue!
+
+  """
+  A list of enterprise organizations configured with the provided members can invite collaborators setting value.
+  """
+  membersCanInviteCollaboratorsSettingOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations with this setting.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The setting value to find organizations for.
+    """
+    value: Boolean!
+  ): OrganizationConnection!
+
+  """
+  Indicates whether members of this enterprise's organizations can purchase additional services for those organizations.
+  """
+  membersCanMakePurchasesSetting: EnterpriseMembersCanMakePurchasesSettingValue!
+
+  """
+  The setting value for whether members with admin permissions for repositories can update protected branches.
+  """
+  membersCanUpdateProtectedBranchesSetting: EnterpriseEnabledDisabledSettingValue!
+
+  """
+  A list of enterprise organizations configured with the provided members can update protected branches setting value.
+  """
+  membersCanUpdateProtectedBranchesSettingOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations with this setting.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The setting value to find organizations for.
+    """
+    value: Boolean!
+  ): OrganizationConnection!
+
+  """
+  The setting value for whether members can view dependency insights.
+  """
+  membersCanViewDependencyInsightsSetting: EnterpriseEnabledDisabledSettingValue!
+
+  """
+  A list of enterprise organizations configured with the provided members can view dependency insights setting value.
+  """
+  membersCanViewDependencyInsightsSettingOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations with this setting.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The setting value to find organizations for.
+    """
+    value: Boolean!
+  ): OrganizationConnection!
+
+  """
+  Indicates if email notification delivery for this enterprise is restricted to verified or approved domains.
+  """
+  notificationDeliveryRestrictionEnabledSetting: NotificationRestrictionSettingValue!
+
+  """
+  The OIDC Identity Provider for the enterprise.
+  """
+  oidcProvider: OIDCProvider
+
+  """
+  The setting value for whether organization projects are enabled for organizations in this enterprise.
+  """
+  organizationProjectsSetting: EnterpriseEnabledDisabledSettingValue!
+
+  """
+  A list of enterprise organizations configured with the provided organization projects setting value.
+  """
+  organizationProjectsSettingOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations with this setting.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The setting value to find organizations for.
+    """
+    value: Boolean!
+  ): OrganizationConnection!
+
+  """
+  A list of outside collaborators across the repositories in the enterprise.
+  """
+  outsideCollaborators(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Only return outside collaborators with this two-factor authentication status.
+    """
+    hasTwoFactorEnabled: Boolean = null
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    The login of one specific outside collaborator.
+    """
+    login: String
+
+    """
+    Ordering options for outside collaborators returned from the connection.
+    """
+    orderBy: EnterpriseMemberOrder = {field: LOGIN, direction: ASC}
+
+    """
+    Only return outside collaborators within the organizations with these logins
+    """
+    organizationLogins: [String!]
+
+    """
+    The search string to look for.
+    """
+    query: String
+
+    """
+    Only return outside collaborators on repositories with this visibility.
+    """
+    visibility: RepositoryVisibility
+  ): EnterpriseOutsideCollaboratorConnection!
+
+  """
+  A list of pending administrator invitations for the enterprise.
+  """
+  pendingAdminInvitations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for pending enterprise administrator invitations returned from the connection.
+    """
+    orderBy: EnterpriseAdministratorInvitationOrder = {field: CREATED_AT, direction: DESC}
+
+    """
+    The search string to look for.
+    """
+    query: String
+
+    """
+    The role to filter by.
+    """
+    role: EnterpriseAdministratorRole
+  ): EnterpriseAdministratorInvitationConnection!
+
+  """
+  A list of pending collaborator invitations across the repositories in the enterprise.
+  """
+  pendingCollaboratorInvitations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for pending repository collaborator invitations returned from the connection.
+    """
+    orderBy: RepositoryInvitationOrder = {field: CREATED_AT, direction: DESC}
+
+    """
+    The search string to look for.
+    """
+    query: String
+  ): RepositoryInvitationConnection!
+
+  """
+  A list of pending member invitations for organizations in the enterprise.
+  """
+  pendingMemberInvitations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Only return invitations matching this invitation source
+    """
+    invitationSource: OrganizationInvitationSource
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Only return invitations within the organizations with these logins
+    """
+    organizationLogins: [String!]
+
+    """
+    The search string to look for.
+    """
+    query: String
+  ): EnterprisePendingMemberInvitationConnection!
+
+  """
+  The setting value for whether repository projects are enabled in this enterprise.
+  """
+  repositoryProjectsSetting: EnterpriseEnabledDisabledSettingValue!
+
+  """
+  A list of enterprise organizations configured with the provided repository projects setting value.
+  """
+  repositoryProjectsSettingOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations with this setting.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The setting value to find organizations for.
+    """
+    value: Boolean!
+  ): OrganizationConnection!
+
+  """
+  The SAML Identity Provider for the enterprise.
+  """
+  samlIdentityProvider: EnterpriseIdentityProvider
+
+  """
+  A list of enterprise organizations configured with the SAML single sign-on setting value.
+  """
+  samlIdentityProviderSettingOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations with this setting.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The setting value to find organizations for.
+    """
+    value: IdentityProviderConfigurationState!
+  ): OrganizationConnection!
+
+  """
+  A list of members with a support entitlement.
+  """
+  supportEntitlements(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for support entitlement users returned from the connection.
+    """
+    orderBy: EnterpriseMemberOrder = {field: LOGIN, direction: ASC}
+  ): EnterpriseMemberConnection!
+
+  """
+  The setting value for whether team discussions are enabled for organizations in this enterprise.
+  """
+  teamDiscussionsSetting: EnterpriseEnabledDisabledSettingValue!
+
+  """
+  A list of enterprise organizations configured with the provided team discussions setting value.
+  """
+  teamDiscussionsSettingOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations with this setting.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The setting value to find organizations for.
+    """
+    value: Boolean!
+  ): OrganizationConnection!
+
+  """
+  The setting value for whether the enterprise requires two-factor authentication for its organizations and users.
+  """
+  twoFactorRequiredSetting: EnterpriseEnabledSettingValue!
+
+  """
+  A list of enterprise organizations configured with the two-factor authentication setting value.
+  """
+  twoFactorRequiredSettingOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations with this setting.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The setting value to find organizations for.
+    """
+    value: Boolean!
+  ): OrganizationConnection!
+}
+
+"""
+The connection type for OrganizationInvitation.
+"""
+type EnterprisePendingMemberInvitationConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnterprisePendingMemberInvitationEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [OrganizationInvitation]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+
+  """
+  Identifies the total count of unique users in the connection.
+  """
+  totalUniqueUserCount: Int!
+}
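+
+# A minimal pagination sketch (illustrative only, not part of the upstream
+# schema): it pages through the connection above using the standard
+# first/after cursor arguments. The top-level `enterprise(slug:)` field and
+# the `PageInfo` fields `hasNextPage`/`endCursor` are assumed to be defined
+# elsewhere in this schema.
+#
+#   query PendingInvitations($slug: String!, $cursor: String) {
+#     enterprise(slug: $slug) {
+#       pendingMemberInvitations(first: 50, after: $cursor) {
+#         totalCount
+#         totalUniqueUserCount
+#         pageInfo {
+#           hasNextPage
+#           endCursor
+#         }
+#       }
+#     }
+#   }
+#
+# Re-issue the query with `$cursor` set to the returned `endCursor` until
+# `hasNextPage` is false.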
+
+"""
+An invitation to be a member in an enterprise organization.
+"""
+type EnterprisePendingMemberInvitationEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: OrganizationInvitation
+}
+
+"""
+A subset of repository information queryable from an enterprise.
+"""
+type EnterpriseRepositoryInfo implements Node {
+  """
+  The Node ID of the EnterpriseRepositoryInfo object
+  """
+  id: ID!
+
+  """
+  Identifies if the repository is private or internal.
+  """
+  isPrivate: Boolean!
+
+  """
+  The repository's name.
+  """
+  name: String!
+
+  """
+  The repository's name with owner.
+  """
+  nameWithOwner: String!
+}
+
+"""
+The connection type for EnterpriseRepositoryInfo.
+"""
+type EnterpriseRepositoryInfoConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnterpriseRepositoryInfoEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [EnterpriseRepositoryInfo]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type EnterpriseRepositoryInfoEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: EnterpriseRepositoryInfo
+}
+
+"""
+An Enterprise Server installation.
+"""
+type EnterpriseServerInstallation implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The customer name to which the Enterprise Server installation belongs.
+  """
+  customerName: String!
+
+  """
+  The host name of the Enterprise Server installation.
+  """
+  hostName: String!
+
+  """
+  The Node ID of the EnterpriseServerInstallation object
+  """
+  id: ID!
+
+  """
+  Whether or not this Enterprise Server installation is connected via GitHub Connect.
+  """
+  isConnected: Boolean!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  User accounts on this Enterprise Server installation.
+  """
+  userAccounts(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for Enterprise Server user accounts returned from the connection.
+    """
+    orderBy: EnterpriseServerUserAccountOrder = {field: LOGIN, direction: ASC}
+  ): EnterpriseServerUserAccountConnection!
+
+  """
+  User accounts uploads for the Enterprise Server installation.
+  """
+  userAccountsUploads(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for Enterprise Server user accounts uploads returned from the connection.
+    """
+    orderBy: EnterpriseServerUserAccountsUploadOrder = {field: CREATED_AT, direction: DESC}
+  ): EnterpriseServerUserAccountsUploadConnection!
+}
+
+"""
+The connection type for EnterpriseServerInstallation.
+"""
+type EnterpriseServerInstallationConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnterpriseServerInstallationEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [EnterpriseServerInstallation]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type EnterpriseServerInstallationEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: EnterpriseServerInstallation
+}
+
+"""
+The connection type for EnterpriseServerInstallation.
+"""
+type EnterpriseServerInstallationMembershipConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnterpriseServerInstallationMembershipEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [EnterpriseServerInstallation]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An Enterprise Server installation that a user is a member of.
+"""
+type EnterpriseServerInstallationMembershipEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: EnterpriseServerInstallation
+
+  """
+  The role of the user in the enterprise membership.
+  """
+  role: EnterpriseUserAccountMembershipRole!
+}
+
+"""
+Ordering options for Enterprise Server installation connections.
+"""
+input EnterpriseServerInstallationOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order Enterprise Server installations by.
+  """
+  field: EnterpriseServerInstallationOrderField!
+}
+
+"""
+Properties by which Enterprise Server installation connections can be ordered.
+"""
+enum EnterpriseServerInstallationOrderField {
+  """
+  Order Enterprise Server installations by creation time
+  """
+  CREATED_AT
+
+  """
+  Order Enterprise Server installations by customer name
+  """
+  CUSTOMER_NAME
+
+  """
+  Order Enterprise Server installations by host name
+  """
+  HOST_NAME
+}
+
+"""
+A user account on an Enterprise Server installation.
+"""
+type EnterpriseServerUserAccount implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  User emails belonging to this user account.
+  """
+  emails(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for Enterprise Server user account emails returned from the connection.
+    """
+    orderBy: EnterpriseServerUserAccountEmailOrder = {field: EMAIL, direction: ASC}
+  ): EnterpriseServerUserAccountEmailConnection!
+
+  """
+  The Enterprise Server installation on which this user account exists.
+  """
+  enterpriseServerInstallation: EnterpriseServerInstallation!
+
+  """
+  The Node ID of the EnterpriseServerUserAccount object
+  """
+  id: ID!
+
+  """
+  Whether the user account is a site administrator on the Enterprise Server installation.
+  """
+  isSiteAdmin: Boolean!
+
+  """
+  The login of the user account on the Enterprise Server installation.
+  """
+  login: String!
+
+  """
+  The profile name of the user account on the Enterprise Server installation.
+  """
+  profileName: String
+
+  """
+  The date and time when the user account was created on the Enterprise Server installation.
+  """
+  remoteCreatedAt: DateTime!
+
+  """
+  The ID of the user account on the Enterprise Server installation.
+  """
+  remoteUserId: Int!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The connection type for EnterpriseServerUserAccount.
+"""
+type EnterpriseServerUserAccountConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnterpriseServerUserAccountEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [EnterpriseServerUserAccount]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type EnterpriseServerUserAccountEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: EnterpriseServerUserAccount
+}
+
+"""
+An email belonging to a user account on an Enterprise Server installation.
+"""
+type EnterpriseServerUserAccountEmail implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The email address.
+  """
+  email: String!
+
+  """
+  The Node ID of the EnterpriseServerUserAccountEmail object
+  """
+  id: ID!
+
+  """
+  Indicates whether this is the primary email of the associated user account.
+  """
+  isPrimary: Boolean!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The user account to which the email belongs.
+  """
+  userAccount: EnterpriseServerUserAccount!
+}
+
+"""
+The connection type for EnterpriseServerUserAccountEmail.
+"""
+type EnterpriseServerUserAccountEmailConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnterpriseServerUserAccountEmailEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [EnterpriseServerUserAccountEmail]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type EnterpriseServerUserAccountEmailEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: EnterpriseServerUserAccountEmail
+}
+
+"""
+Ordering options for Enterprise Server user account email connections.
+"""
+input EnterpriseServerUserAccountEmailOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order emails by.
+  """
+  field: EnterpriseServerUserAccountEmailOrderField!
+}
+
+"""
+Properties by which Enterprise Server user account email connections can be ordered.
+"""
+enum EnterpriseServerUserAccountEmailOrderField {
+  """
+  Order emails by email
+  """
+  EMAIL
+}
+
+"""
+Ordering options for Enterprise Server user account connections.
+"""
+input EnterpriseServerUserAccountOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order user accounts by.
+  """
+  field: EnterpriseServerUserAccountOrderField!
+}
+
+"""
+Properties by which Enterprise Server user account connections can be ordered.
+"""
+enum EnterpriseServerUserAccountOrderField {
+  """
+  Order user accounts by login
+  """
+  LOGIN
+
+  """
+  Order user accounts by creation time on the Enterprise Server installation
+  """
+  REMOTE_CREATED_AT
+}
+
+"""
+A user accounts upload from an Enterprise Server installation.
+"""
+type EnterpriseServerUserAccountsUpload implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The enterprise to which this upload belongs.
+  """
+  enterprise: Enterprise!
+
+  """
+  The Enterprise Server installation for which this upload was generated.
+  """
+  enterpriseServerInstallation: EnterpriseServerInstallation!
+
+  """
+  The Node ID of the EnterpriseServerUserAccountsUpload object
+  """
+  id: ID!
+
+  """
+  The name of the file uploaded.
+  """
+  name: String!
+
+  """
+  The synchronization state of the upload
+  """
+  syncState: EnterpriseServerUserAccountsUploadSyncState!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The connection type for EnterpriseServerUserAccountsUpload.
+"""
+type EnterpriseServerUserAccountsUploadConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnterpriseServerUserAccountsUploadEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [EnterpriseServerUserAccountsUpload]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type EnterpriseServerUserAccountsUploadEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: EnterpriseServerUserAccountsUpload
+}
+
+"""
+Ordering options for Enterprise Server user accounts upload connections.
+"""
+input EnterpriseServerUserAccountsUploadOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order user accounts uploads by.
+  """
+  field: EnterpriseServerUserAccountsUploadOrderField!
+}
+
+"""
+Properties by which Enterprise Server user accounts upload connections can be ordered.
+"""
+enum EnterpriseServerUserAccountsUploadOrderField {
+  """
+  Order user accounts uploads by creation time
+  """
+  CREATED_AT
+}
+
+"""
+Synchronization state of the Enterprise Server user accounts upload
+"""
+enum EnterpriseServerUserAccountsUploadSyncState {
+  """
+  The synchronization of the upload failed.
+  """
+  FAILURE
+
+  """
+  The synchronization of the upload is pending.
+  """
+  PENDING
+
+  """
+  The synchronization of the upload succeeded.
+  """
+  SUCCESS
+}
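+
+# An illustrative polling query (not part of the upstream schema) that reads
+# the sync state above for recent uploads on an installation. The top-level
+# `node(id:)` field is assumed to be defined elsewhere in this schema; the
+# default ordering (CREATED_AT, DESC) returns the newest uploads first.
+#
+#   query UploadStates($installationId: ID!) {
+#     node(id: $installationId) {
+#       ... on EnterpriseServerInstallation {
+#         hostName
+#         userAccountsUploads(first: 10) {
+#           nodes {
+#             name
+#             syncState
+#             updatedAt
+#           }
+#         }
+#       }
+#     }
+#   }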
+
+"""
+An account for a user who is an admin of an enterprise or a member of an enterprise through one or more organizations.
+"""
+type EnterpriseUserAccount implements Actor & Node {
+  """
+  A URL pointing to the enterprise user account's public avatar.
+  """
+  avatarUrl(
+    """
+    The size of the resulting square image.
+    """
+    size: Int
+  ): URI!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The enterprise in which this user account exists.
+  """
+  enterprise: Enterprise!
+
+  """
+  A list of Enterprise Server installations this user is a member of.
+  """
+  enterpriseInstallations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for installations returned from the connection.
+    """
+    orderBy: EnterpriseServerInstallationOrder = {field: HOST_NAME, direction: ASC}
+
+    """
+    The search string to look for.
+    """
+    query: String
+
+    """
+    The role of the user in the installation.
+    """
+    role: EnterpriseUserAccountMembershipRole
+  ): EnterpriseServerInstallationMembershipConnection!
+
+  """
+  The Node ID of the EnterpriseUserAccount object
+  """
+  id: ID!
+
+  """
+  An identifier for the enterprise user account, a login or email address
+  """
+  login: String!
+
+  """
+  The name of the enterprise user account
+  """
+  name: String
+
+  """
+  A list of enterprise organizations this user is a member of.
+  """
+  organizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for organizations returned from the connection.
+    """
+    orderBy: OrganizationOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The search string to look for.
+    """
+    query: String
+
+    """
+    The role of the user in the enterprise organization.
+    """
+    role: EnterpriseUserAccountMembershipRole
+  ): EnterpriseOrganizationMembershipConnection!
+
+  """
+  The HTTP path for this user.
+  """
+  resourcePath: URI!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this user.
+  """
+  url: URI!
+
+  """
+  The user within the enterprise.
+  """
+  user: User
+}
+
+"""
+The possible roles for enterprise membership.
+"""
+enum EnterpriseUserAccountMembershipRole {
+  """
+  The user is a member of an organization in the enterprise.
+  """
+  MEMBER
+
+  """
+  The user is an owner of an organization in the enterprise.
+  """
+  OWNER
+
+  """
+  The user is not an owner of the enterprise, and not a member or owner of any
+  organizations in the enterprise; only for EMU-enabled enterprises.
+  """
+  UNAFFILIATED
+}
+
+"""
+The possible GitHub Enterprise deployments where this user can exist.
+"""
+enum EnterpriseUserDeployment {
+  """
+  The user is part of a GitHub Enterprise Cloud deployment.
+  """
+  CLOUD
+
+  """
+  The user is part of a GitHub Enterprise Server deployment.
+  """
+  SERVER
+}
+
+"""
+An environment.
+"""
+type Environment implements Node {
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the Environment object
+  """
+  id: ID!
+
+  """
+  The name of the environment
+  """
+  name: String!
+
+  """
+  The protection rules defined for this environment
+  """
+  protectionRules(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): DeploymentProtectionRuleConnection!
+}
+
+"""
+The connection type for Environment.
+"""
+type EnvironmentConnection {
+  """
+  A list of edges.
+  """
+  edges: [EnvironmentEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Environment]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type EnvironmentEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Environment
+}
+
+"""
+Properties by which environments connections can be ordered
+"""
+enum EnvironmentOrderField {
+  """
+  Order environments by name.
+  """
+  NAME
+}
+
+"""
+Ordering options for environments
+"""
+input Environments {
+  """
+  The direction in which to order environments by the specified field.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order environments by.
+  """
+  field: EnvironmentOrderField!
+}
+
+"""
+An external identity provisioned by SAML SSO or SCIM. If SAML is configured on
+the organization, the external identity is visible to (1) organization owners,
+(2) organization owners' personal access tokens (classic) with read:org or
+admin:org scope, and (3) GitHub Apps with an installation token that has read
+or write access to members. If SAML is configured on the enterprise, the
+external identity is visible to (1) enterprise owners and (2) enterprise
+owners' personal access tokens (classic) with read:enterprise or
+admin:enterprise scope.
+"""
+type ExternalIdentity implements Node {
+  """
+  The GUID for this identity
+  """
+  guid: String!
+
+  """
+  The Node ID of the ExternalIdentity object
+  """
+  id: ID!
+
+  """
+  Organization invitation for this SCIM-provisioned external identity
+  """
+  organizationInvitation: OrganizationInvitation
+
+  """
+  SAML Identity attributes
+  """
+  samlIdentity: ExternalIdentitySamlAttributes
+
+  """
+  SCIM Identity attributes
+  """
+  scimIdentity: ExternalIdentityScimAttributes
+
+  """
+  User linked to this external identity. Will be NULL if this identity has not been claimed by an organization member.
+  """
+  user: User
+}
+
+"""
+An attribute for the External Identity attributes collection
+"""
+type ExternalIdentityAttribute {
+  """
+  The attribute metadata as JSON
+  """
+  metadata: String
+
+  """
+  The attribute name
+  """
+  name: String!
+
+  """
+  The attribute value
+  """
+  value: String!
+}
+
+"""
+The connection type for ExternalIdentity.
+"""
+type ExternalIdentityConnection {
+  """
+  A list of edges.
+  """
+  edges: [ExternalIdentityEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ExternalIdentity]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ExternalIdentityEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ExternalIdentity
+}
+
+"""
+SAML attributes for the External Identity
+"""
+type ExternalIdentitySamlAttributes {
+  """
+  SAML Identity attributes
+  """
+  attributes: [ExternalIdentityAttribute!]!
+
+  """
+  The emails associated with the SAML identity
+  """
+  emails: [UserEmailMetadata!]
+
+  """
+  Family name of the SAML identity
+  """
+  familyName: String
+
+  """
+  Given name of the SAML identity
+  """
+  givenName: String
+
+  """
+  The groups linked to this identity in IDP
+  """
+  groups: [String!]
+
+  """
+  The NameID of the SAML identity
+  """
+  nameId: String
+
+  """
+  The userName of the SAML identity
+  """
+  username: String
+}
+
+"""
+SCIM attributes for the External Identity
+"""
+type ExternalIdentityScimAttributes {
+  """
+  The emails associated with the SCIM identity
+  """
+  emails: [UserEmailMetadata!]
+
+  """
+  Family name of the SCIM identity
+  """
+  familyName: String
+
+  """
+  Given name of the SCIM identity
+  """
+  givenName: String
+
+  """
+  The groups linked to this identity in IDP
+  """
+  groups: [String!]
+
+  """
+  The userName of the SCIM identity
+  """
+  username: String
+}
+
+"""
+A command to add a file at the given path with the given contents as part of a
+commit.  Any existing file at that path will be replaced.
+"""
+input FileAddition {
+  """
+  The base64 encoded contents of the file
+  """
+  contents: Base64String!
+
+  """
+  The path in the repository where the file will be located
+  """
+  path: String!
+}
+
+"""
+A description of a set of changes to a file tree to be made as part of
+a git commit, modeled as zero or more file `additions` and zero or more
+file `deletions`.
+
+Both fields are optional; omitting both will produce a commit with no
+file changes.
+
+`deletions` and `additions` describe changes to files identified
+by their path in the git tree using unix-style path separators, i.e.
+`/`.  The root of a git tree is an empty string, so paths are not
+slash-prefixed.
+
+`path` values must be unique across all `additions` and `deletions`
+provided.  Any duplication will result in a validation error.
+
+### Encoding
+
+File contents must be provided in full for each `FileAddition`.
+
+The `contents` of a `FileAddition` must be encoded using RFC 4648
+compliant base64, i.e. correct padding is required and no characters
+outside the standard alphabet may be used.  Invalid base64
+encoding will be rejected with a validation error.
+
+The encoded contents may be binary.
+
+For text files, no assumptions are made about the character encoding of
+the file contents (after base64 decoding).  No charset transcoding or
+line-ending normalization will be performed; it is the client's
+responsibility to manage the character encoding of files they provide.
+However, for maximum compatibility we recommend using UTF-8 encoding
+and ensuring that all files in a repository use a consistent
+line-ending convention (`\n` or `\r\n`), and that all files end
+with a newline.
+
+### Modeling file changes
+
+Each of the five types of conceptual changes that can be made in a
+git commit can be described using the `FileChanges` type as follows:
+
+1. New file addition: create file `hello world\n` at path `docs/README.txt`:
+
+       {
+         "additions" [
+           {
+             "path": "docs/README.txt",
+             "contents": base64encode("hello world\n")
+           }
+         ]
+       }
+
+2. Existing file modification: change existing `docs/README.txt` to have new
+   content `new content here\n`:
+
+       {
+         "additions" [
+           {
+             "path": "docs/README.txt",
+             "contents": base64encode("new content here\n")
+           }
+         ]
+       }
+
+3. Existing file deletion: remove existing file `docs/README.txt`.
+   Note that the path is required to exist -- specifying a
+   path that does not exist on the given branch will abort the
+   commit and return an error.
+
+       {
+         "deletions" [
+           {
+             "path": "docs/README.txt"
+           }
+         ]
+       }
+
+
+4. File rename with no changes: rename `docs/README.txt` with
+   previous content `hello world\n` to the same content at
+   `newdocs/README.txt`:
+
+       {
+         "deletions" [
+           {
+             "path": "docs/README.txt",
+           }
+         ],
+         "additions" [
+           {
+             "path": "newdocs/README.txt",
+             "contents": base64encode("hello world\n")
+           }
+         ]
+       }
+
+
+5. File rename with changes: rename `docs/README.txt` with
+   previous content `hello world\n` to a file at path
+   `newdocs/README.txt` with content `new contents\n`:
+
+       {
+         "deletions" [
+           {
+             "path": "docs/README.txt",
+           }
+         ],
+         "additions" [
+           {
+             "path": "newdocs/README.txt",
+             "contents": base64encode("new contents\n")
+           }
+         ]
+       }
+"""
+input FileChanges {
+  """
+  File to add or change.
+  """
+  additions: [FileAddition!] = []
+
+  """
+  Files to delete.
+  """
+  deletions: [FileDeletion!] = []
+}
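+
+# An illustrative mutation (not part of the upstream schema) showing a
+# `FileChanges` value for pattern 4 above: renaming `docs/README.txt` to
+# `newdocs/README.txt` with unchanged contents. The `createCommitOnBranch`
+# mutation and its input shape are assumed to be defined elsewhere in this
+# schema; `aGVsbG8gd29ybGQK` is the base64 encoding of "hello world\n".
+#
+#   mutation {
+#     createCommitOnBranch(input: {
+#       branch: {repositoryNameWithOwner: "octocat/example", branchName: "main"}
+#       message: {headline: "Rename README"}
+#       expectedHeadOid: "0000000000000000000000000000000000000000"
+#       fileChanges: {
+#         deletions: [{path: "docs/README.txt"}]
+#         additions: [{path: "newdocs/README.txt", contents: "aGVsbG8gd29ybGQK"}]
+#       }
+#     }) {
+#       commit {
+#         oid
+#       }
+#     }
+#   }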
+
+"""
+A command to delete the file at the given path as part of a commit.
+"""
+input FileDeletion {
+  """
+  The path to delete
+  """
+  path: String!
+}
+
+"""
+The possible viewed states of a file.
+"""
+enum FileViewedState {
+  """
+  The file has new changes since last viewed.
+  """
+  DISMISSED
+
+  """
+  The file has not been marked as viewed.
+  """
+  UNVIEWED
+
+  """
+  The file has been marked as viewed.
+  """
+  VIEWED
+}
+
+"""
+Autogenerated input type of FollowOrganization
+"""
+input FollowOrganizationInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the organization to follow.
+  """
+  organizationId: ID! @possibleTypes(concreteTypes: ["Organization"])
+}
+
+"""
+Autogenerated return type of FollowOrganization
+"""
+type FollowOrganizationPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The organization that was followed.
+  """
+  organization: Organization
+}
+
+"""
+Autogenerated input type of FollowUser
+"""
+input FollowUserInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the user to follow.
+  """
+  userId: ID! @possibleTypes(concreteTypes: ["User"])
+}
+
+"""
+Autogenerated return type of FollowUser
+"""
+type FollowUserPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The user that was followed.
+  """
+  user: User
+}
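+
+# An illustrative mutation (not part of the upstream schema) pairing the
+# FollowUser input and payload above. The mutation field name `followUser`
+# and the `User.login` field are assumed to be defined elsewhere in this
+# schema; `followOrganization` works analogously with its own input/payload.
+#
+#   mutation FollowUser($userId: ID!) {
+#     followUser(input: {userId: $userId}) {
+#       user {
+#         login
+#       }
+#     }
+#   }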
+
+"""
+The connection type for User.
+"""
+type FollowerConnection {
+  """
+  A list of edges.
+  """
+  edges: [UserEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [User]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+The connection type for User.
+"""
+type FollowingConnection {
+  """
+  A list of edges.
+  """
+  edges: [UserEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [User]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+A funding platform link for a repository.
+"""
+type FundingLink {
+  """
+  The funding platform this link is for.
+  """
+  platform: FundingPlatform!
+
+  """
+  The configured URL for this funding link.
+  """
+  url: URI!
+}
+
+"""
+The possible funding platforms for repository funding links.
+"""
+enum FundingPlatform {
+  """
+  Community Bridge funding platform.
+  """
+  COMMUNITY_BRIDGE
+
+  """
+  Custom funding platform.
+  """
+  CUSTOM
+
+  """
+  GitHub funding platform.
+  """
+  GITHUB
+
+  """
+  IssueHunt funding platform.
+  """
+  ISSUEHUNT
+
+  """
+  Ko-fi funding platform.
+  """
+  KO_FI
+
+  """
+  LFX Crowdfunding funding platform.
+  """
+  LFX_CROWDFUNDING
+
+  """
+  Liberapay funding platform.
+  """
+  LIBERAPAY
+
+  """
+  Open Collective funding platform.
+  """
+  OPEN_COLLECTIVE
+
+  """
+  Otechie funding platform.
+  """
+  OTECHIE
+
+  """
+  Patreon funding platform.
+  """
+  PATREON
+
+  """
+  Tidelift funding platform.
+  """
+  TIDELIFT
+}
+
+"""
+A generic hovercard context with a message and icon
+"""
+type GenericHovercardContext implements HovercardContext {
+  """
+  A string describing this context
+  """
+  message: String!
+
+  """
+  An octicon to accompany this context
+  """
+  octicon: String!
+}
+
+"""
+A Gist.
+"""
+type Gist implements Node & Starrable & UniformResourceLocatable {
+  """
+  A list of comments associated with the gist
+  """
+  comments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): GistCommentConnection!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The gist description.
+  """
+  description: String
+
+  """
+  The files in this gist.
+  """
+  files(
+    """
+    The maximum number of files to return.
+    """
+    limit: Int = 10
+
+    """
+    The oid of the files to return
+    """
+    oid: GitObjectID
+  ): [GistFile]
+
+  """
+  A list of forks associated with the gist
+  """
+  forks(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for gists returned from the connection
+    """
+    orderBy: GistOrder
+  ): GistConnection!
+
+  """
+  The Node ID of the Gist object
+  """
+  id: ID!
+
+  """
+  Identifies if the gist is a fork.
+  """
+  isFork: Boolean!
+
+  """
+  Whether the gist is public or not.
+  """
+  isPublic: Boolean!
+
+  """
+  The gist name.
+  """
+  name: String!
+
+  """
+  The gist owner.
+  """
+  owner: RepositoryOwner
+
+  """
+  Identifies when the gist was last pushed to.
+  """
+  pushedAt: DateTime
+
+  """
+  The HTML path to this resource.
+  """
+  resourcePath: URI!
+
+  """
+  Returns a count of how many stargazers there are on this object
+  """
+  stargazerCount: Int!
+
+  """
+  A list of users who have starred this starrable.
+  """
+  stargazers(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Order for connection
+    """
+    orderBy: StarOrder
+  ): StargazerConnection!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this Gist.
+  """
+  url: URI!
+
+  """
+  Returns a boolean indicating whether the viewing user has starred this starrable.
+  """
+  viewerHasStarred: Boolean!
+}
+
+"""
+Represents a comment on a Gist.
+"""
+type GistComment implements Comment & Deletable & Minimizable & Node & Updatable & UpdatableComment {
+  """
+  The actor who authored the comment.
+  """
+  author: Actor
+
+  """
+  Author's association with the gist.
+  """
+  authorAssociation: CommentAuthorAssociation!
+
+  """
+  Identifies the comment body.
+  """
+  body: String!
+
+  """
+  The body rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  The body rendered to text.
+  """
+  bodyText: String!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Check if this comment was created via an email reply.
+  """
+  createdViaEmail: Boolean!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The actor who edited the comment.
+  """
+  editor: Actor
+
+  """
+  The associated gist.
+  """
+  gist: Gist!
+
+  """
+  The Node ID of the GistComment object
+  """
+  id: ID!
+
+  """
+  Check if this comment was edited and includes an edit with the creation data
+  """
+  includesCreatedEdit: Boolean!
+
+  """
+  Returns whether or not a comment has been minimized.
+  """
+  isMinimized: Boolean!
+
+  """
+  The moment the editor made the last edit
+  """
+  lastEditedAt: DateTime
+
+  """
+  Returns why the comment was minimized. One of `abuse`, `off-topic`,
+  `outdated`, `resolved`, `duplicate` and `spam`. Note that the case and
+  formatting of these values differs from the inputs to the `MinimizeComment` mutation.
+  """
+  minimizedReason: String
+
+  """
+  Identifies when the comment was published.
+  """
+  publishedAt: DateTime
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  A list of edits to this content.
+  """
+  userContentEdits(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserContentEditConnection
+
+  """
+  Check if the current viewer can delete this object.
+  """
+  viewerCanDelete: Boolean!
+
+  """
+  Check if the current viewer can minimize this object.
+  """
+  viewerCanMinimize: Boolean!
+
+  """
+  Check if the current viewer can update this object.
+  """
+  viewerCanUpdate: Boolean!
+
+  """
+  Reasons why the current viewer can not update this comment.
+  """
+  viewerCannotUpdateReasons: [CommentCannotUpdateReason!]!
+
+  """
+  Did the viewer author this comment.
+  """
+  viewerDidAuthor: Boolean!
+}
+
+"""
+The connection type for GistComment.
+"""
+type GistCommentConnection {
+  """
+  A list of edges.
+  """
+  edges: [GistCommentEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [GistComment]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type GistCommentEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: GistComment
+}
+
+"""
+The connection type for Gist.
+"""
+type GistConnection {
+  """
+  A list of edges.
+  """
+  edges: [GistEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Gist]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type GistEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Gist
+}
+
+"""
+A file in a gist.
+"""
+type GistFile {
+  """
+  The file name encoded to remove characters that are invalid in URL paths.
+  """
+  encodedName: String
+
+  """
+  The gist file encoding.
+  """
+  encoding: String
+
+  """
+  The file extension from the file name.
+  """
+  extension: String
+
+  """
+  Indicates if this file is an image.
+  """
+  isImage: Boolean!
+
+  """
+  Whether the file's contents were truncated.
+  """
+  isTruncated: Boolean!
+
+  """
+  The programming language this file is written in.
+  """
+  language: Language
+
+  """
+  The gist file name.
+  """
+  name: String
+
+  """
+  The gist file size in bytes.
+  """
+  size: Int
+
+  """
+  UTF-8 text data, or null if the file is binary
+  """
+  text(
+    """
+    Optionally truncate the returned file to this length.
+    """
+    truncate: Int
+  ): String
+}
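+
+# An illustrative query (not part of the upstream schema) that fetches a
+# gist's files, truncating each file's text via the `truncate` argument
+# above. The `viewer` root field and its `gist(name:)` field are assumed to
+# be defined elsewhere in this schema.
+#
+#   query GistFiles($name: String!) {
+#     viewer {
+#       gist(name: $name) {
+#         files(limit: 5) {
+#           name
+#           isTruncated
+#           text(truncate: 1024)
+#         }
+#       }
+#     }
+#   }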
+
+"""
+Ordering options for gist connections
+"""
+input GistOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order repositories by.
+  """
+  field: GistOrderField!
+}
+
+"""
+Properties by which gist connections can be ordered.
+"""
+enum GistOrderField {
+  """
+  Order gists by creation time
+  """
+  CREATED_AT
+
+  """
+  Order gists by push time
+  """
+  PUSHED_AT
+
+  """
+  Order gists by update time
+  """
+  UPDATED_AT
+}
+
+"""
+The privacy of a Gist
+"""
+enum GistPrivacy {
+  """
+  Gists that are public and secret
+  """
+  ALL
+
+  """
+  Public
+  """
+  PUBLIC
+
+  """
+  Secret
+  """
+  SECRET
+}
+
+"""
+Represents an actor in a Git commit (i.e. an author or committer).
+"""
+type GitActor {
+  """
+  A URL pointing to the author's public avatar.
+  """
+  avatarUrl(
+    """
+    The size of the resulting square image.
+    """
+    size: Int
+  ): URI!
+
+  """
+  The timestamp of the Git action (authoring or committing).
+  """
+  date: GitTimestamp
+
+  """
+  The email in the Git commit.
+  """
+  email: String
+
+  """
+  The name in the Git commit.
+  """
+  name: String
+
+  """
+  The GitHub user corresponding to the email field. Null if no such user exists.
+  """
+  user: User
+}
+
+"""
+The connection type for GitActor.
+"""
+type GitActorConnection {
+  """
+  A list of edges.
+  """
+  edges: [GitActorEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [GitActor]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type GitActorEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: GitActor
+}
+
+"""
+Represents information about the GitHub instance.
+"""
+type GitHubMetadata {
+  """
+  Returns a String that's a SHA of `github-services`
+  """
+  gitHubServicesSha: GitObjectID!
+
+  """
+  IP addresses that users connect to for git operations
+  """
+  gitIpAddresses: [String!]
+
+  """
+  IP addresses that GitHub Enterprise Importer uses for outbound connections
+  """
+  githubEnterpriseImporterIpAddresses: [String!]
+
+  """
+  IP addresses that service hooks are sent from
+  """
+  hookIpAddresses: [String!]
+
+  """
+  IP addresses that the importer connects from
+  """
+  importerIpAddresses: [String!]
+
+  """
+  Whether or not users are verified
+  """
+  isPasswordAuthenticationVerifiable: Boolean!
+
+  """
+  IP addresses for GitHub Pages' A records
+  """
+  pagesIpAddresses: [String!]
+}
+
+"""
+Represents a Git object.
+"""
+interface GitObject {
+  """
+  An abbreviated version of the Git object ID
+  """
+  abbreviatedOid: String!
+
+  """
+  The HTTP path for this Git object
+  """
+  commitResourcePath: URI!
+
+  """
+  The HTTP URL for this Git object
+  """
+  commitUrl: URI!
+
+  """
+  The Node ID of the GitObject object
+  """
+  id: ID!
+
+  """
+  The Git object ID
+  """
+  oid: GitObjectID!
+
+  """
+  The Repository the Git object belongs to
+  """
+  repository: Repository!
+}
+
+"""
+A Git object ID.
+"""
+scalar GitObjectID
+
+"""
+A fully qualified reference name (e.g. `refs/heads/master`).
+"""
+scalar GitRefname @preview(toggledBy: "update-refs-preview")
+
+"""
+Git SSH string
+"""
+scalar GitSSHRemote
+
+"""
+Information about a signature (GPG or S/MIME) on a Commit or Tag.
+"""
+interface GitSignature {
+  """
+  Email used to sign this object.
+  """
+  email: String!
+
+  """
+  True if the signature is valid and verified by GitHub.
+  """
+  isValid: Boolean!
+
+  """
+  Payload for GPG signing object. Raw ODB object without the signature header.
+  """
+  payload: String!
+
+  """
+  ASCII-armored signature header from object.
+  """
+  signature: String!
+
+  """
+  GitHub user corresponding to the email signing this commit.
+  """
+  signer: User
+
+  """
+  The state of this signature. `VALID` if the signature is valid and verified by
+  GitHub; otherwise represents the reason why the signature is considered invalid.
+  """
+  state: GitSignatureState!
+
+  """
+  True if the signature was made with GitHub's signing key.
+  """
+  wasSignedByGitHub: Boolean!
+}
+
+"""
+The state of a Git signature.
+"""
+enum GitSignatureState {
+  """
+  The signing certificate or its chain could not be verified
+  """
+  BAD_CERT
+
+  """
+  Invalid email used for signing
+  """
+  BAD_EMAIL
+
+  """
+  Signing key expired
+  """
+  EXPIRED_KEY
+
+  """
+  Internal error - the GPG verification service misbehaved
+  """
+  GPGVERIFY_ERROR
+
+  """
+  Internal error - the GPG verification service is unavailable at the moment
+  """
+  GPGVERIFY_UNAVAILABLE
+
+  """
+  Invalid signature
+  """
+  INVALID
+
+  """
+  Malformed signature
+  """
+  MALFORMED_SIG
+
+  """
+  The usage flags for the key that signed this don't allow signing
+  """
+  NOT_SIGNING_KEY
+
+  """
+  Email used for signing not known to GitHub
+  """
+  NO_USER
+
+  """
+  Valid signature, though certificate revocation check failed
+  """
+  OCSP_ERROR
+
+  """
+  Valid signature, pending certificate revocation checking
+  """
+  OCSP_PENDING
+
+  """
+  One or more certificates in chain has been revoked
+  """
+  OCSP_REVOKED
+
+  """
+  Key used for signing not known to GitHub
+  """
+  UNKNOWN_KEY
+
+  """
+  Unknown signature type
+  """
+  UNKNOWN_SIG_TYPE
+
+  """
+  Unsigned
+  """
+  UNSIGNED
+
+  """
+  Email used for signing unverified on GitHub
+  """
+  UNVERIFIED_EMAIL
+
+  """
+  Valid signature and verified by GitHub
+  """
+  VALID
+}
+
+"""
+An ISO-8601 encoded date string. Unlike the DateTime type, GitTimestamp is not converted to UTC.
+"""
+scalar GitTimestamp
+
+"""
+Represents a GPG signature on a Commit or Tag.
+"""
+type GpgSignature implements GitSignature {
+  """
+  Email used to sign this object.
+  """
+  email: String!
+
+  """
+  True if the signature is valid and verified by GitHub.
+  """
+  isValid: Boolean!
+
+  """
+  Hex-encoded ID of the key that signed this object.
+  """
+  keyId: String
+
+  """
+  Payload for GPG signing object. Raw ODB object without the signature header.
+  """
+  payload: String!
+
+  """
+  ASCII-armored signature header from object.
+  """
+  signature: String!
+
+  """
+  GitHub user corresponding to the email signing this commit.
+  """
+  signer: User
+
+  """
+  The state of this signature. `VALID` if the signature is valid and verified by
+  GitHub; otherwise represents the reason why the signature is considered invalid.
+  """
+  state: GitSignatureState!
+
+  """
+  True if the signature was made with GitHub's signing key.
+  """
+  wasSignedByGitHub: Boolean!
+}
+
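+# A sketch of inspecting a commit signature through the `GitSignature`
+# interface, narrowing to `GpgSignature` for GPG-specific fields. It assumes
+# `Commit.signature` as defined elsewhere in this schema; owner, name, and the
+# revision expression are placeholders.
+#
+#   query {
+#     repository(owner: "octocat", name: "hello-world") {
+#       object(expression: "HEAD") {
+#         ... on Commit {
+#           signature {
+#             isValid
+#             state
+#             ... on GpgSignature {
+#               keyId
+#             }
+#           }
+#         }
+#       }
+#     }
+#   }
+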
+"""
+Autogenerated input type of GrantEnterpriseOrganizationsMigratorRole
+"""
+input GrantEnterpriseOrganizationsMigratorRoleInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise whose managed organizations will be granted the migrator role.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The login of the user to grant the migrator role
+  """
+  login: String!
+}
+
+"""
+Autogenerated return type of GrantEnterpriseOrganizationsMigratorRole
+"""
+type GrantEnterpriseOrganizationsMigratorRolePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The organizations that had the migrator role applied for the given user.
+  """
+  organizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): OrganizationConnection
+}
+
+"""
+Autogenerated input type of GrantMigratorRole
+"""
+input GrantMigratorRoleInput {
+  """
+  The user login or Team slug to grant the migrator role.
+  """
+  actor: String!
+
+  """
+  Specifies the type of the actor, which can be either USER or TEAM.
+  """
+  actorType: ActorType!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the organization that the user/team belongs to.
+  """
+  organizationId: ID! @possibleTypes(concreteTypes: ["Organization"])
+}
+
+"""
+Autogenerated return type of GrantMigratorRole
+"""
+type GrantMigratorRolePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Did the operation succeed?
+  """
+  success: Boolean
+}
+
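+# A sketch of invoking this input/payload pair, assuming the corresponding
+# `grantMigratorRole` field on the Mutation root; the organization node ID is
+# a hypothetical placeholder.
+#
+#   mutation {
+#     grantMigratorRole(
+#       input: {organizationId: "O_EXAMPLE", actor: "octocat", actorType: USER}
+#     ) {
+#       success
+#     }
+#   }
+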
+"""
+A string containing HTML code.
+"""
+scalar HTML
+
+"""
+Represents a 'head_ref_deleted' event on a given pull request.
+"""
+type HeadRefDeletedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the Ref associated with the `head_ref_deleted` event.
+  """
+  headRef: Ref
+
+  """
+  Identifies the name of the Ref associated with the `head_ref_deleted` event.
+  """
+  headRefName: String!
+
+  """
+  The Node ID of the HeadRefDeletedEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+}
+
+"""
+Represents a 'head_ref_force_pushed' event on a given pull request.
+"""
+type HeadRefForcePushedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the after commit SHA for the 'head_ref_force_pushed' event.
+  """
+  afterCommit: Commit
+
+  """
+  Identifies the before commit SHA for the 'head_ref_force_pushed' event.
+  """
+  beforeCommit: Commit
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the HeadRefForcePushedEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+
+  """
+  Identifies the fully qualified ref name for the 'head_ref_force_pushed' event.
+  """
+  ref: Ref
+}
+
+"""
+Represents a 'head_ref_restored' event on a given pull request.
+"""
+type HeadRefRestoredEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the HeadRefRestoredEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+}
+
+"""
+Detail needed to display a hovercard for a user
+"""
+type Hovercard {
+  """
+  Each of the contexts for this hovercard
+  """
+  contexts: [HovercardContext!]!
+}
+
+"""
+An individual line of a hovercard
+"""
+interface HovercardContext {
+  """
+  A string describing this context
+  """
+  message: String!
+
+  """
+  An octicon to accompany this context
+  """
+  octicon: String!
+}
+
+"""
+The possible states in which authentication can be configured with an identity provider.
+"""
+enum IdentityProviderConfigurationState {
+  """
+  Authentication with an identity provider is configured but not enforced.
+  """
+  CONFIGURED
+
+  """
+  Authentication with an identity provider is configured and enforced.
+  """
+  ENFORCED
+
+  """
+  Authentication with an identity provider is not configured.
+  """
+  UNCONFIGURED
+}
+
+"""
+Autogenerated input type of ImportProject
+"""
+input ImportProjectInput {
+  """
+  The description of Project.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A list of columns containing issues and pull requests.
+  """
+  columnImports: [ProjectColumnImport!]!
+
+  """
+  The name of Project.
+  """
+  name: String!
+
+  """
+  The name of the Organization or User to create the Project under.
+  """
+  ownerName: String!
+
+  """
+  Whether the Project is public or not.
+  """
+  public: Boolean = false
+}
+
+"""
+Autogenerated return type of ImportProject
+"""
+type ImportProjectPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new Project!
+  """
+  project: Project
+}
+
+"""
+Autogenerated input type of InviteEnterpriseAdmin
+"""
+input InviteEnterpriseAdminInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The email of the person to invite as an administrator.
+  """
+  email: String
+
+  """
+  The ID of the enterprise to which you want to invite an administrator.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The login of a user to invite as an administrator.
+  """
+  invitee: String
+
+  """
+  The role of the administrator.
+  """
+  role: EnterpriseAdministratorRole
+}
+
+"""
+Autogenerated return type of InviteEnterpriseAdmin
+"""
+type InviteEnterpriseAdminPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The created enterprise administrator invitation.
+  """
+  invitation: EnterpriseAdministratorInvitation
+}
+
+"""
+The possible values for the IP allow list enabled setting.
+"""
+enum IpAllowListEnabledSettingValue {
+  """
+  The setting is disabled for the owner.
+  """
+  DISABLED
+
+  """
+  The setting is enabled for the owner.
+  """
+  ENABLED
+}
+
+"""
+An IP address or range of addresses that is allowed to access an owner's resources.
+"""
+type IpAllowListEntry implements Node {
+  """
+  A single IP address or range of IP addresses in CIDR notation.
+  """
+  allowListValue: String!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the IpAllowListEntry object
+  """
+  id: ID!
+
+  """
+  Whether the entry is currently active.
+  """
+  isActive: Boolean!
+
+  """
+  The name of the IP allow list entry.
+  """
+  name: String
+
+  """
+  The owner of the IP allow list entry.
+  """
+  owner: IpAllowListOwner!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The connection type for IpAllowListEntry.
+"""
+type IpAllowListEntryConnection {
+  """
+  A list of edges.
+  """
+  edges: [IpAllowListEntryEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [IpAllowListEntry]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type IpAllowListEntryEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: IpAllowListEntry
+}
+
+"""
+Ordering options for IP allow list entry connections.
+"""
+input IpAllowListEntryOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order IP allow list entries by.
+  """
+  field: IpAllowListEntryOrderField!
+}
+
+"""
+Properties by which IP allow list entry connections can be ordered.
+"""
+enum IpAllowListEntryOrderField {
+  """
+  Order IP allow list entries by the allow list value.
+  """
+  ALLOW_LIST_VALUE
+
+  """
+  Order IP allow list entries by creation time.
+  """
+  CREATED_AT
+}
+
+"""
+The possible values for the IP allow list configuration for installed GitHub Apps setting.
+"""
+enum IpAllowListForInstalledAppsEnabledSettingValue {
+  """
+  The setting is disabled for the owner.
+  """
+  DISABLED
+
+  """
+  The setting is enabled for the owner.
+  """
+  ENABLED
+}
+
+"""
+Types that can own an IP allow list.
+"""
+union IpAllowListOwner = App | Enterprise | Organization
+
+"""
+An Issue is a place to discuss ideas, enhancements, tasks, and bugs for a project.
+"""
+type Issue implements Assignable & Closable & Comment & Deletable & Labelable & Lockable & Node & ProjectV2Owner & Reactable & RepositoryNode & Subscribable & SubscribableThread & UniformResourceLocatable & Updatable & UpdatableComment {
+  """
+  Reason that the conversation was locked.
+  """
+  activeLockReason: LockReason
+
+  """
+  A list of Users assigned to this object.
+  """
+  assignees(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserConnection!
+
+  """
+  The actor who authored the comment.
+  """
+  author: Actor
+
+  """
+  Author's association with the subject of the comment.
+  """
+  authorAssociation: CommentAuthorAssociation!
+
+  """
+  Identifies the body of the issue.
+  """
+  body: String!
+
+  """
+  The body rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  The HTTP path for this issue body
+  """
+  bodyResourcePath: URI!
+
+  """
+  Identifies the body of the issue rendered to text.
+  """
+  bodyText: String!
+
+  """
+  The HTTP URL for this issue body
+  """
+  bodyUrl: URI!
+
+  """
+  Indicates if the object is closed (definition of closed may depend on type)
+  """
+  closed: Boolean!
+
+  """
+  Identifies the date and time when the object was closed.
+  """
+  closedAt: DateTime
+
+  """
+  A list of comments associated with the Issue.
+  """
+  comments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for issue comments returned from the connection.
+    """
+    orderBy: IssueCommentOrder
+  ): IssueCommentConnection!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Check if this comment was created via an email reply.
+  """
+  createdViaEmail: Boolean!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The actor who edited the comment.
+  """
+  editor: Actor
+
+  """
+  Identifies the primary key from the database as a BigInt.
+  """
+  fullDatabaseId: BigInt
+
+  """
+  The hovercard information for this issue
+  """
+  hovercard(
+    """
+    Whether or not to include notification contexts
+    """
+    includeNotificationContexts: Boolean = true
+  ): Hovercard!
+
+  """
+  The Node ID of the Issue object
+  """
+  id: ID!
+
+  """
+  Check if this comment was edited and includes an edit with the creation data
+  """
+  includesCreatedEdit: Boolean!
+
+  """
+  Indicates whether or not this issue is currently pinned to the repository issues list
+  """
+  isPinned: Boolean
+
+  """
+  Is this issue read by the viewer
+  """
+  isReadByViewer: Boolean
+
+  """
+  A list of labels associated with the object.
+  """
+  labels(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for labels returned from the connection.
+    """
+    orderBy: LabelOrder = {field: CREATED_AT, direction: ASC}
+  ): LabelConnection
+
+  """
+  The moment the editor made the last edit
+  """
+  lastEditedAt: DateTime
+
+  """
+  Branches linked to this issue.
+  """
+  linkedBranches(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): LinkedBranchConnection!
+
+  """
+  `true` if the object is locked
+  """
+  locked: Boolean!
+
+  """
+  Identifies the milestone associated with the issue.
+  """
+  milestone: Milestone
+
+  """
+  Identifies the issue number.
+  """
+  number: Int!
+
+  """
+  A list of Users that are participating in the Issue conversation.
+  """
+  participants(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserConnection!
+
+  """
+  List of project cards associated with this issue.
+  """
+  projectCards(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    A list of archived states to filter the cards by
+    """
+    archivedStates: [ProjectCardArchivedState] = [ARCHIVED, NOT_ARCHIVED]
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectCardConnection!
+
+  """
+  List of project items associated with this issue.
+  """
+  projectItems(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Include archived items.
+    """
+    includeArchived: Boolean = true
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectV2ItemConnection!
+
+  """
+  Find a project by number.
+  """
+  projectV2(
+    """
+    The project number.
+    """
+    number: Int!
+  ): ProjectV2
+
+  """
+  A list of projects under the owner.
+  """
+  projectsV2(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    How to order the returned projects.
+    """
+    orderBy: ProjectV2Order = {field: NUMBER, direction: DESC}
+
+    """
+    A project to search for under the owner.
+    """
+    query: String
+  ): ProjectV2Connection!
+
+  """
+  Identifies when the comment was published.
+  """
+  publishedAt: DateTime
+
+  """
+  A list of reactions grouped by content left on the subject.
+  """
+  reactionGroups: [ReactionGroup!]
+
+  """
+  A list of Reactions left on the Issue.
+  """
+  reactions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Allows filtering Reactions by emoji.
+    """
+    content: ReactionContent
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Allows specifying the order in which reactions are returned.
+    """
+    orderBy: ReactionOrder
+  ): ReactionConnection!
+
+  """
+  The repository associated with this node.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path for this issue
+  """
+  resourcePath: URI!
+
+  """
+  Identifies the state of the issue.
+  """
+  state: IssueState!
+
+  """
+  Identifies the reason for the issue state.
+  """
+  stateReason: IssueStateReason
+
+  """
+  A list of events, comments, commits, etc. associated with the issue.
+  """
+  timeline(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Allows filtering timeline events by a `since` timestamp.
+    """
+    since: DateTime
+  ): IssueTimelineConnection!
+    @deprecated(reason: "`timeline` will be removed. Use Issue.timelineItems instead. Removal on 2020-10-01 UTC.")
+
+  """
+  A list of events, comments, commits, etc. associated with the issue.
+  """
+  timelineItems(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Filter timeline items by type.
+    """
+    itemTypes: [IssueTimelineItemsItemType!]
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter timeline items by a `since` timestamp.
+    """
+    since: DateTime
+
+    """
+    Skips the first _n_ elements in the list.
+    """
+    skip: Int
+  ): IssueTimelineItemsConnection!
+
+  """
+  Identifies the issue title.
+  """
+  title: String!
+
+  """
+  Identifies the issue title rendered to HTML.
+  """
+  titleHTML: String!
+
+  """
+  A list of issues that track this issue
+  """
+  trackedInIssues(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): IssueConnection!
+
+  """
+  A list of issues tracked inside the current issue
+  """
+  trackedIssues(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): IssueConnection!
+
+  """
+  The number of tracked issues for this issue
+  """
+  trackedIssuesCount(
+    """
+    Limit the count to tracked issues with the specified states.
+    """
+    states: [TrackedIssueStates]
+  ): Int!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this issue
+  """
+  url: URI!
+
+  """
+  A list of edits to this content.
+  """
+  userContentEdits(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserContentEditConnection
+
+  """
+  Indicates if the object can be closed by the viewer.
+  """
+  viewerCanClose: Boolean!
+
+  """
+  Check if the current viewer can delete this object.
+  """
+  viewerCanDelete: Boolean!
+
+  """
+  Can user react to this subject
+  """
+  viewerCanReact: Boolean!
+
+  """
+  Indicates if the object can be reopened by the viewer.
+  """
+  viewerCanReopen: Boolean!
+
+  """
+  Check if the viewer is able to change their subscription status for the repository.
+  """
+  viewerCanSubscribe: Boolean!
+
+  """
+  Check if the current viewer can update this object.
+  """
+  viewerCanUpdate: Boolean!
+
+  """
+  Reasons why the current viewer cannot update this comment.
+  """
+  viewerCannotUpdateReasons: [CommentCannotUpdateReason!]!
+
+  """
+  Did the viewer author this comment.
+  """
+  viewerDidAuthor: Boolean!
+
+  """
+  Identifies if the viewer is watching, not watching, or ignoring the subscribable entity.
+  """
+  viewerSubscription: SubscriptionState
+
+  """
+  Identifies the viewer's thread subscription form action.
+  """
+  viewerThreadSubscriptionFormAction: ThreadSubscriptionFormAction
+
+  """
+  Identifies the viewer's thread subscription status.
+  """
+  viewerThreadSubscriptionStatus: ThreadSubscriptionState
+}
+
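+# A sketch of reading an `Issue` with a paginated, ordered comment connection.
+# Assumes `Repository.issue` as defined elsewhere in this schema; owner, name,
+# and the issue number are placeholders.
+#
+#   query {
+#     repository(owner: "octocat", name: "hello-world") {
+#       issue(number: 1) {
+#         title
+#         state
+#         comments(first: 10, orderBy: {field: UPDATED_AT, direction: ASC}) {
+#           totalCount
+#           nodes {
+#             author {
+#               login
+#             }
+#             bodyText
+#           }
+#         }
+#       }
+#     }
+#   }
+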
+"""
+The possible state reasons of a closed issue.
+"""
+enum IssueClosedStateReason {
+  """
+  An issue that has been closed as completed
+  """
+  COMPLETED
+
+  """
+  An issue that has been closed as not planned
+  """
+  NOT_PLANNED
+}
+
+"""
+Represents a comment on an Issue.
+"""
+type IssueComment implements Comment & Deletable & Minimizable & Node & Reactable & RepositoryNode & Updatable & UpdatableComment {
+  """
+  The actor who authored the comment.
+  """
+  author: Actor
+
+  """
+  Author's association with the subject of the comment.
+  """
+  authorAssociation: CommentAuthorAssociation!
+
+  """
+  The body as Markdown.
+  """
+  body: String!
+
+  """
+  The body rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  The body rendered to text.
+  """
+  bodyText: String!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Check if this comment was created via an email reply.
+  """
+  createdViaEmail: Boolean!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The actor who edited the comment.
+  """
+  editor: Actor
+
+  """
+  Identifies the primary key from the database as a BigInt.
+  """
+  fullDatabaseId: BigInt
+
+  """
+  The Node ID of the IssueComment object
+  """
+  id: ID!
+
+  """
+  Check if this comment was edited and includes an edit with the creation data
+  """
+  includesCreatedEdit: Boolean!
+
+  """
+  Returns whether or not a comment has been minimized.
+  """
+  isMinimized: Boolean!
+
+  """
+  Identifies the issue associated with the comment.
+  """
+  issue: Issue!
+
+  """
+  The moment the editor made the last edit
+  """
+  lastEditedAt: DateTime
+
+  """
+  Returns why the comment was minimized. One of `abuse`, `off-topic`,
+  `outdated`, `resolved`, `duplicate` and `spam`. Note that the case and
+  formatting of these values differs from the inputs to the `MinimizeComment` mutation.
+  """
+  minimizedReason: String
+
+  """
+  Identifies when the comment was published.
+  """
+  publishedAt: DateTime
+
+  """
+  Returns the pull request associated with the comment, if this comment was made on a
+  pull request.
+  """
+  pullRequest: PullRequest
+
+  """
+  A list of reactions grouped by content left on the subject.
+  """
+  reactionGroups: [ReactionGroup!]
+
+  """
+  A list of Reactions left on the Issue.
+  """
+  reactions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Allows filtering Reactions by emoji.
+    """
+    content: ReactionContent
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Allows specifying the order in which reactions are returned.
+    """
+    orderBy: ReactionOrder
+  ): ReactionConnection!
+
+  """
+  The repository associated with this node.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path for this issue comment
+  """
+  resourcePath: URI!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this issue comment
+  """
+  url: URI!
+
+  """
+  A list of edits to this content.
+  """
+  userContentEdits(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserContentEditConnection
+
+  """
+  Check if the current viewer can delete this object.
+  """
+  viewerCanDelete: Boolean!
+
+  """
+  Check if the current viewer can minimize this object.
+  """
+  viewerCanMinimize: Boolean!
+
+  """
+  Can user react to this subject
+  """
+  viewerCanReact: Boolean!
+
+  """
+  Check if the current viewer can update this object.
+  """
+  viewerCanUpdate: Boolean!
+
+  """
+  Reasons why the current viewer cannot update this comment.
+  """
+  viewerCannotUpdateReasons: [CommentCannotUpdateReason!]!
+
+  """
+  Did the viewer author this comment.
+  """
+  viewerDidAuthor: Boolean!
+}
+
+"""
+The connection type for IssueComment.
+"""
+type IssueCommentConnection {
+  """
+  A list of edges.
+  """
+  edges: [IssueCommentEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [IssueComment]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type IssueCommentEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: IssueComment
+}
+
+"""
+Ways in which lists of issue comments can be ordered upon return.
+"""
+input IssueCommentOrder {
+  """
+  The direction in which to order issue comments by the specified field.
+  """
+  direction: OrderDirection!
+
+  """
+  The field by which to order issue comments.
+  """
+  field: IssueCommentOrderField!
+}
+
+"""
+Properties by which issue comment connections can be ordered.
+"""
+enum IssueCommentOrderField {
+  """
+  Order issue comments by update time
+  """
+  UPDATED_AT
+}
+
+"""
+The connection type for Issue.
+"""
+type IssueConnection {
+  """
+  A list of edges.
+  """
+  edges: [IssueEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Issue]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+This aggregates issues opened by a user within one repository.
+"""
+type IssueContributionsByRepository {
+  """
+  The issue contributions.
+  """
+  contributions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for contributions returned from the connection.
+    """
+    orderBy: ContributionOrder = {direction: DESC}
+  ): CreatedIssueContributionConnection!
+
+  """
+  The repository in which the issues were opened.
+  """
+  repository: Repository!
+}
+
+"""
+An edge in a connection.
+"""
+type IssueEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Issue
+}
+
+"""
+Ways in which to filter lists of issues.
+"""
+input IssueFilters {
+  """
+  List issues assigned to the given name. Pass in `null` for issues with no assigned
+  user, and `*` for issues assigned to any user.
+  """
+  assignee: String
+
+  """
+  List issues created by the given name.
+  """
+  createdBy: String
+
+  """
+  List issues where the list of label names exist on the issue.
+  """
+  labels: [String!]
+
+  """
+  List issues where the given name is mentioned in the issue.
+  """
+  mentioned: String
+
+  """
+  List issues by the given milestone argument. If a string representation of an
+  integer is passed, it should refer to a milestone by its database ID. Pass in
+  `null` for issues with no milestone, and `*` for issues that are assigned to any milestone.
+  """
+  milestone: String
+
+  """
+  List issues by the given milestone argument. If a string representation of an
+  integer is passed, it should refer to a milestone by its number field. Pass in
+  `null` for issues with no milestone, and `*` for issues that are assigned to any milestone.
+  """
+  milestoneNumber: String
+
+  """
+  List issues that have been updated at or after the given date.
+  """
+  since: DateTime
+
+  """
+  List issues filtered by the list of states given.
+  """
+  states: [IssueState!]
+
+  """
+  List issues subscribed to by viewer.
+  """
+  viewerSubscribed: Boolean = false
+}
+
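+# A sketch of passing `IssueFilters` (and an `IssueOrder`, defined below) to a
+# repository's `issues` connection, as defined elsewhere in this schema;
+# owner, name, and the filter values are placeholders.
+#
+#   query {
+#     repository(owner: "octocat", name: "hello-world") {
+#       issues(
+#         first: 20
+#         filterBy: {assignee: "*", labels: ["bug"], states: [OPEN]}
+#         orderBy: {field: UPDATED_AT, direction: DESC}
+#       ) {
+#         nodes {
+#           number
+#           title
+#         }
+#       }
+#     }
+#   }
+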
+"""
+Used for return value of Repository.issueOrPullRequest.
+"""
+union IssueOrPullRequest = Issue | PullRequest
+
+"""
+Ways in which lists of issues can be ordered upon return.
+"""
+input IssueOrder {
+  """
+  The direction in which to order issues by the specified field.
+  """
+  direction: OrderDirection!
+
+  """
+  The field by which to order issues.
+  """
+  field: IssueOrderField!
+}
+
+"""
+Properties by which issue connections can be ordered.
+"""
+enum IssueOrderField {
+  """
+  Order issues by comment count
+  """
+  COMMENTS
+
+  """
+  Order issues by creation time
+  """
+  CREATED_AT
+
+  """
+  Order issues by update time
+  """
+  UPDATED_AT
+}
+
+"""
+The possible states of an issue.
+"""
+enum IssueState {
+  """
+  An issue that has been closed
+  """
+  CLOSED
+
+  """
+  An issue that is still open
+  """
+  OPEN
+}
+
+"""
+The possible state reasons of an issue.
+"""
+enum IssueStateReason {
+  """
+  An issue that has been closed as completed
+  """
+  COMPLETED
+
+  """
+  An issue that has been closed as not planned
+  """
+  NOT_PLANNED
+
+  """
+  An issue that has been reopened
+  """
+  REOPENED
+}
+
+"""
+A repository issue template.
+"""
+type IssueTemplate {
+  """
+  The template purpose.
+  """
+  about: String
+
+  """
+  The suggested assignees.
+  """
+  assignees(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserConnection!
+
+  """
+  The suggested issue body.
+  """
+  body: String
+
+  """
+  The template filename.
+  """
+  filename: String!
+
+  """
+  The suggested issue labels
+  """
+  labels(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for labels returned from the connection.
+    """
+    orderBy: LabelOrder = {field: CREATED_AT, direction: ASC}
+  ): LabelConnection
+
+  """
+  The template name.
+  """
+  name: String!
+
+  """
+  The suggested issue title.
+  """
+  title: String
+}
+
+"""
+The connection type for IssueTimelineItem.
+"""
+type IssueTimelineConnection {
+  """
+  A list of edges.
+  """
+  edges: [IssueTimelineItemEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [IssueTimelineItem]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An item in an issue timeline
+"""
+union IssueTimelineItem =
+    AssignedEvent
+  | ClosedEvent
+  | Commit
+  | CrossReferencedEvent
+  | DemilestonedEvent
+  | IssueComment
+  | LabeledEvent
+  | LockedEvent
+  | MilestonedEvent
+  | ReferencedEvent
+  | RenamedTitleEvent
+  | ReopenedEvent
+  | SubscribedEvent
+  | TransferredEvent
+  | UnassignedEvent
+  | UnlabeledEvent
+  | UnlockedEvent
+  | UnsubscribedEvent
+  | UserBlockedEvent
+
+"""
+An edge in a connection.
+"""
+type IssueTimelineItemEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: IssueTimelineItem
+}
+
+"""
+An item in an issue timeline
+"""
+union IssueTimelineItems =
+    AddedToProjectEvent
+  | AssignedEvent
+  | ClosedEvent
+  | CommentDeletedEvent
+  | ConnectedEvent
+  | ConvertedNoteToIssueEvent
+  | ConvertedToDiscussionEvent
+  | CrossReferencedEvent
+  | DemilestonedEvent
+  | DisconnectedEvent
+  | IssueComment
+  | LabeledEvent
+  | LockedEvent
+  | MarkedAsDuplicateEvent
+  | MentionedEvent
+  | MilestonedEvent
+  | MovedColumnsInProjectEvent
+  | PinnedEvent
+  | ReferencedEvent
+  | RemovedFromProjectEvent
+  | RenamedTitleEvent
+  | ReopenedEvent
+  | SubscribedEvent
+  | TransferredEvent
+  | UnassignedEvent
+  | UnlabeledEvent
+  | UnlockedEvent
+  | UnmarkedAsDuplicateEvent
+  | UnpinnedEvent
+  | UnsubscribedEvent
+  | UserBlockedEvent
+
+"""
+The connection type for IssueTimelineItems.
+"""
+type IssueTimelineItemsConnection {
+  """
+  A list of edges.
+  """
+  edges: [IssueTimelineItemsEdge]
+
+  """
+  Identifies the count of items after applying `before` and `after` filters.
+  """
+  filteredCount: Int!
+
+  """
+  A list of nodes.
+  """
+  nodes: [IssueTimelineItems]
+
+  """
+  Identifies the count of items after applying `before`/`after` filters and `first`/`last`/`skip` slicing.
+  """
+  pageCount: Int!
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+
+  """
+  Identifies the date and time when the timeline was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+An edge in a connection.
+"""
+type IssueTimelineItemsEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: IssueTimelineItems
+}
+
+"""
+The possible item types found in a timeline.
+"""
+enum IssueTimelineItemsItemType {
+  """
+  Represents an 'added_to_project' event on a given issue or pull request.
+  """
+  ADDED_TO_PROJECT_EVENT
+
+  """
+  Represents an 'assigned' event on any assignable object.
+  """
+  ASSIGNED_EVENT
+
+  """
+  Represents a 'closed' event on any `Closable`.
+  """
+  CLOSED_EVENT
+
+  """
+  Represents a 'comment_deleted' event on a given issue or pull request.
+  """
+  COMMENT_DELETED_EVENT
+
+  """
+  Represents a 'connected' event on a given issue or pull request.
+  """
+  CONNECTED_EVENT
+
+  """
+  Represents a 'converted_note_to_issue' event on a given issue or pull request.
+  """
+  CONVERTED_NOTE_TO_ISSUE_EVENT
+
+  """
+  Represents a 'converted_to_discussion' event on a given issue.
+  """
+  CONVERTED_TO_DISCUSSION_EVENT
+
+  """
+  Represents a mention made by one issue or pull request to another.
+  """
+  CROSS_REFERENCED_EVENT
+
+  """
+  Represents a 'demilestoned' event on a given issue or pull request.
+  """
+  DEMILESTONED_EVENT
+
+  """
+  Represents a 'disconnected' event on a given issue or pull request.
+  """
+  DISCONNECTED_EVENT
+
+  """
+  Represents a comment on an Issue.
+  """
+  ISSUE_COMMENT
+
+  """
+  Represents a 'labeled' event on a given issue or pull request.
+  """
+  LABELED_EVENT
+
+  """
+  Represents a 'locked' event on a given issue or pull request.
+  """
+  LOCKED_EVENT
+
+  """
+  Represents a 'marked_as_duplicate' event on a given issue or pull request.
+  """
+  MARKED_AS_DUPLICATE_EVENT
+
+  """
+  Represents a 'mentioned' event on a given issue or pull request.
+  """
+  MENTIONED_EVENT
+
+  """
+  Represents a 'milestoned' event on a given issue or pull request.
+  """
+  MILESTONED_EVENT
+
+  """
+  Represents a 'moved_columns_in_project' event on a given issue or pull request.
+  """
+  MOVED_COLUMNS_IN_PROJECT_EVENT
+
+  """
+  Represents a 'pinned' event on a given issue or pull request.
+  """
+  PINNED_EVENT
+
+  """
+  Represents a 'referenced' event on a given `ReferencedSubject`.
+  """
+  REFERENCED_EVENT
+
+  """
+  Represents a 'removed_from_project' event on a given issue or pull request.
+  """
+  REMOVED_FROM_PROJECT_EVENT
+
+  """
+  Represents a 'renamed' event on a given issue or pull request.
+  """
+  RENAMED_TITLE_EVENT
+
+  """
+  Represents a 'reopened' event on any `Closable`.
+  """
+  REOPENED_EVENT
+
+  """
+  Represents a 'subscribed' event on a given `Subscribable`.
+  """
+  SUBSCRIBED_EVENT
+
+  """
+  Represents a 'transferred' event on a given issue or pull request.
+  """
+  TRANSFERRED_EVENT
+
+  """
+  Represents an 'unassigned' event on any assignable object.
+  """
+  UNASSIGNED_EVENT
+
+  """
+  Represents an 'unlabeled' event on a given issue or pull request.
+  """
+  UNLABELED_EVENT
+
+  """
+  Represents an 'unlocked' event on a given issue or pull request.
+  """
+  UNLOCKED_EVENT
+
+  """
+  Represents an 'unmarked_as_duplicate' event on a given issue or pull request.
+  """
+  UNMARKED_AS_DUPLICATE_EVENT
+
+  """
+  Represents an 'unpinned' event on a given issue or pull request.
+  """
+  UNPINNED_EVENT
+
+  """
+  Represents an 'unsubscribed' event on a given `Subscribable`.
+  """
+  UNSUBSCRIBED_EVENT
+
+  """
+  Represents a 'user_blocked' event on a given user.
+  """
+  USER_BLOCKED_EVENT
+}
+
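+# A sketch of filtering `Issue.timelineItems` by item type and selecting
+# type-specific fields via inline fragments; owner, name, and the issue
+# number are placeholders.
+#
+#   query {
+#     repository(owner: "octocat", name: "hello-world") {
+#       issue(number: 1) {
+#         timelineItems(first: 50, itemTypes: [CLOSED_EVENT, LABELED_EVENT]) {
+#           filteredCount
+#           nodes {
+#             ... on ClosedEvent {
+#               createdAt
+#             }
+#             ... on LabeledEvent {
+#               label {
+#                 name
+#               }
+#             }
+#           }
+#         }
+#       }
+#     }
+#   }
+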
+"""
+Represents a user signing up for a GitHub account.
+"""
+type JoinedGitHubContribution implements Contribution {
+  """
+  Whether this contribution is associated with a record you do not have access to. For
+  example, your own 'first issue' contribution may have been made on a repository you can no
+  longer access.
+  """
+  isRestricted: Boolean!
+
+  """
+  When this contribution was made.
+  """
+  occurredAt: DateTime!
+
+  """
+  The HTTP path for this contribution.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for this contribution.
+  """
+  url: URI!
+
+  """
+  The user who made this contribution.
+  """
+  user: User!
+}
+
+"""
+A label for categorizing Issues, Pull Requests, Milestones, or Discussions within a given Repository.
+"""
+type Label implements Node {
+  """
+  Identifies the label color.
+  """
+  color: String!
+
+  """
+  Identifies the date and time when the label was created.
+  """
+  createdAt: DateTime
+
+  """
+  A brief description of this label.
+  """
+  description: String
+
+  """
+  The Node ID of the Label object
+  """
+  id: ID!
+
+  """
+  Indicates whether or not this is a default label.
+  """
+  isDefault: Boolean!
+
+  """
+  A list of issues associated with this label.
+  """
+  issues(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Filtering options for issues returned from the connection.
+    """
+    filterBy: IssueFilters
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    A list of label names to filter the pull requests by.
+    """
+    labels: [String!]
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for issues returned from the connection.
+    """
+    orderBy: IssueOrder
+
+    """
+    A list of states to filter the issues by.
+    """
+    states: [IssueState!]
+  ): IssueConnection!
+
+  """
+  Identifies the label name.
+  """
+  name: String!
+
+  """
+  A list of pull requests associated with this label.
+  """
+  pullRequests(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    The base ref name to filter the pull requests by.
+    """
+    baseRefName: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    The head ref name to filter the pull requests by.
+    """
+    headRefName: String
+
+    """
+    A list of label names to filter the pull requests by.
+    """
+    labels: [String!]
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for pull requests returned from the connection.
+    """
+    orderBy: IssueOrder
+
+    """
+    A list of states to filter the pull requests by.
+    """
+    states: [PullRequestState!]
+  ): PullRequestConnection!
+
+  """
+  The repository associated with this label.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path for this label.
+  """
+  resourcePath: URI!
+
+  """
+  Identifies the date and time when the label was last updated.
+  """
+  updatedAt: DateTime
+
+  """
+  The HTTP URL for this label.
+  """
+  url: URI!
+}
+
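+# A sketch of looking up a single `Label` and counting its open issues,
+# assuming the `Repository.label` field defined elsewhere in this schema;
+# owner, name, and the label name are placeholders.
+#
+#   query {
+#     repository(owner: "octocat", name: "hello-world") {
+#       label(name: "bug") {
+#         color
+#         issues(first: 5, states: [OPEN]) {
+#           totalCount
+#         }
+#       }
+#     }
+#   }
+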
+"""
+The connection type for Label.
+"""
+type LabelConnection {
+  """
+  A list of edges.
+  """
+  edges: [LabelEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Label]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type LabelEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Label
+}
+
+"""
+Ways in which lists of labels can be ordered upon return.
+"""
+input LabelOrder {
+  """
+  The direction in which to order labels by the specified field.
+  """
+  direction: OrderDirection!
+
+  """
+  The field by which to order labels.
+  """
+  field: LabelOrderField!
+}
+
+"""
+Properties by which label connections can be ordered.
+"""
+enum LabelOrderField {
+  """
+  Order labels by creation time
+  """
+  CREATED_AT
+
+  """
+  Order labels by name
+  """
+  NAME
+}
+
+"""
+An object that can have labels assigned to it.
+"""
+interface Labelable {
+  """
+  A list of labels associated with the object.
+  """
+  labels(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for labels returned from the connection.
+    """
+    orderBy: LabelOrder = {field: CREATED_AT, direction: ASC}
+  ): LabelConnection
+}
+
+"""
+Represents a 'labeled' event on a given issue or pull request.
+"""
+type LabeledEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the LabeledEvent object
+  """
+  id: ID!
+
+  """
+  Identifies the label associated with the 'labeled' event.
+  """
+  label: Label!
+
+  """
+  Identifies the `Labelable` associated with the event.
+  """
+  labelable: Labelable!
+}
+
+"""
+Represents a given language found in repositories.
+"""
+type Language implements Node {
+  """
+  The color defined for the current language.
+  """
+  color: String
+
+  """
+  The Node ID of the Language object
+  """
+  id: ID!
+
+  """
+  The name of the current language.
+  """
+  name: String!
+}
+
+"""
+A list of languages associated with the parent.
+"""
+type LanguageConnection {
+  """
+  A list of edges.
+  """
+  edges: [LanguageEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Language]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+
+  """
+  The total size in bytes of files written in that language.
+  """
+  totalSize: Int!
+}
+
+"""
+Represents the language of a repository.
+"""
+type LanguageEdge {
+  cursor: String!
+  node: Language!
+
+  """
+  The number of bytes of code written in the language.
+  """
+  size: Int!
+}
+
+"""
+Ordering options for language connections.
+"""
+input LanguageOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order languages by.
+  """
+  field: LanguageOrderField!
+}
+
+"""
+Properties by which language connections can be ordered.
+"""
+enum LanguageOrderField {
+  """
+  Order languages by the size of all files containing the language
+  """
+  SIZE
+}
+
+"""
+A repository's open source license
+"""
+type License implements Node {
+  """
+  The full text of the license
+  """
+  body: String!
+
+  """
+  The conditions set by the license
+  """
+  conditions: [LicenseRule]!
+
+  """
+  A human-readable description of the license
+  """
+  description: String
+
+  """
+  Whether the license should be featured
+  """
+  featured: Boolean!
+
+  """
+  Whether the license should be displayed in license pickers
+  """
+  hidden: Boolean!
+
+  """
+  The Node ID of the License object
+  """
+  id: ID!
+
+  """
+  Instructions on how to implement the license
+  """
+  implementation: String
+
+  """
+  The lowercased SPDX ID of the license
+  """
+  key: String!
+
+  """
+  The limitations set by the license
+  """
+  limitations: [LicenseRule]!
+
+  """
+  The license full name specified by <https://spdx.org/licenses>
+  """
+  name: String!
+
+  """
+  Customary short name if applicable (e.g., GPLv3)
+  """
+  nickname: String
+
+  """
+  The permissions set by the license
+  """
+  permissions: [LicenseRule]!
+
+  """
+  Whether the license is a pseudo-license placeholder (e.g., other, no-license)
+  """
+  pseudoLicense: Boolean!
+
+  """
+  Short identifier specified by <https://spdx.org/licenses>
+  """
+  spdxId: String
+
+  """
+  URL to the license on <https://choosealicense.com>
+  """
+  url: URI
+}
+
+"""
+Describes a License's conditions, permissions, and limitations
+"""
+type LicenseRule {
+  """
+  A description of the rule
+  """
+  description: String!
+
+  """
+  The machine-readable rule key
+  """
+  key: String!
+
+  """
+  The human-readable rule label
+  """
+  label: String!
+}
+
+"""
+Autogenerated input type of LinkProjectV2ToRepository
+"""
+input LinkProjectV2ToRepositoryInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the project to link to the repository.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+
+  """
+  The ID of the repository to link to the project.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of LinkProjectV2ToRepository
+"""
+type LinkProjectV2ToRepositoryPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The repository the project is linked to.
+  """
+  repository: Repository
+}
+
+"""
+Autogenerated input type of LinkProjectV2ToTeam
+"""
+input LinkProjectV2ToTeamInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the project to link to the team.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+
+  """
+  The ID of the team to link to the project.
+  """
+  teamId: ID! @possibleTypes(concreteTypes: ["Team"])
+}
+
+"""
+Autogenerated return type of LinkProjectV2ToTeam
+"""
+type LinkProjectV2ToTeamPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The team the project is linked to
+  """
+  team: Team
+}
+
+"""
+Autogenerated input type of LinkRepositoryToProject
+"""
+input LinkRepositoryToProjectInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the Project to link to a Repository
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["Project"])
+
+  """
+  The ID of the Repository to link to a Project.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of LinkRepositoryToProject
+"""
+type LinkRepositoryToProjectPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The linked Project.
+  """
+  project: Project
+
+  """
+  The linked Repository.
+  """
+  repository: Repository
+}
+
+"""
+A branch linked to an issue.
+"""
+type LinkedBranch implements Node {
+  """
+  The Node ID of the LinkedBranch object
+  """
+  id: ID!
+
+  """
+  The branch's ref.
+  """
+  ref: Ref
+}
+
+"""
+The connection type for LinkedBranch.
+"""
+type LinkedBranchConnection {
+  """
+  A list of edges.
+  """
+  edges: [LinkedBranchEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [LinkedBranch]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type LinkedBranchEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: LinkedBranch
+}
+
+"""
+Autogenerated input type of LockLockable
+"""
+input LockLockableInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A reason why the item will be locked.
+  """
+  lockReason: LockReason
+
+  """
+  ID of the item to be locked.
+  """
+  lockableId: ID! @possibleTypes(concreteTypes: ["Discussion", "Issue", "PullRequest"], abstractType: "Lockable")
+}
+
+"""
+Autogenerated return type of LockLockable
+"""
+type LockLockablePayload {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The item that was locked.
+  """
+  lockedRecord: Lockable
+}
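+
+# A hypothetical invocation, assuming the corresponding `lockLockable` field
+# on Mutation (not shown in this excerpt); the node ID is a placeholder:
+#
+#   mutation {
+#     lockLockable(input: {lockableId: "I_exampleNodeId", lockReason: RESOLVED}) {
+#       lockedRecord {
+#         locked
+#         activeLockReason
+#       }
+#     }
+#   }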
+
+"""
+The possible reasons that an issue or pull request was locked.
+"""
+enum LockReason {
+  """
+  The issue or pull request was locked because the conversation was off-topic.
+  """
+  OFF_TOPIC
+
+  """
+  The issue or pull request was locked because the conversation was resolved.
+  """
+  RESOLVED
+
+  """
+  The issue or pull request was locked because the conversation was spam.
+  """
+  SPAM
+
+  """
+  The issue or pull request was locked because the conversation was too heated.
+  """
+  TOO_HEATED
+}
+
+"""
+An object that can be locked.
+"""
+interface Lockable {
+  """
+  Reason that the conversation was locked.
+  """
+  activeLockReason: LockReason
+
+  """
+  `true` if the object is locked
+  """
+  locked: Boolean!
+}
+
+"""
+Represents a 'locked' event on a given issue or pull request.
+"""
+type LockedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the LockedEvent object
+  """
+  id: ID!
+
+  """
+  Reason that the conversation was locked (optional).
+  """
+  lockReason: LockReason
+
+  """
+  Object that was locked.
+  """
+  lockable: Lockable!
+}
+
+"""
+A placeholder user for attribution of imported data on GitHub.
+"""
+type Mannequin implements Actor & Node & UniformResourceLocatable {
+  """
+  A URL pointing to the GitHub App's public avatar.
+  """
+  avatarUrl(
+    """
+    The size of the resulting square image.
+    """
+    size: Int
+  ): URI!
+
+  """
+  The user that has claimed the data attributed to this mannequin.
+  """
+  claimant: User
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The mannequin's email on the source instance.
+  """
+  email: String
+
+  """
+  The Node ID of the Mannequin object
+  """
+  id: ID!
+
+  """
+  The username of the actor.
+  """
+  login: String!
+
+  """
+  The HTML path to this resource.
+  """
+  resourcePath: URI!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The URL to this resource.
+  """
+  url: URI!
+}
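+
+# A hypothetical query showing the avatarUrl field argument above, assuming
+# the standard `node(id:)` root field; the node ID is a placeholder:
+#
+#   query {
+#     node(id: "M_exampleNodeId") {
+#       ... on Mannequin {
+#         login
+#         avatarUrl(size: 200)
+#       }
+#     }
+#   }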
+
+"""
+The connection type for Mannequin.
+"""
+type MannequinConnection {
+  """
+  A list of edges.
+  """
+  edges: [MannequinEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Mannequin]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type MannequinEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Mannequin
+}
+
+"""
+Ordering options for mannequins.
+"""
+input MannequinOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order mannequins by.
+  """
+  field: MannequinOrderField!
+}
+
+"""
+Properties by which mannequins can be ordered.
+"""
+enum MannequinOrderField {
+  """
+  Order mannequins by when they were created.
+  """
+  CREATED_AT
+
+  """
+  Order mannequins alphabetically by their source login.
+  """
+  LOGIN
+}
+
+"""
+Autogenerated input type of MarkDiscussionCommentAsAnswer
+"""
+input MarkDiscussionCommentAsAnswerInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the discussion comment to mark as an answer.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["DiscussionComment"])
+}
+
+"""
+Autogenerated return type of MarkDiscussionCommentAsAnswer
+"""
+type MarkDiscussionCommentAsAnswerPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The discussion that includes the chosen comment.
+  """
+  discussion: Discussion
+}
+
+"""
+Autogenerated input type of MarkFileAsViewed
+"""
+input MarkFileAsViewedInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The path of the file to mark as viewed.
+  """
+  path: String!
+
+  """
+  The Node ID of the pull request.
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+}
+
+"""
+Autogenerated return type of MarkFileAsViewed
+"""
+type MarkFileAsViewedPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated pull request.
+  """
+  pullRequest: PullRequest
+}
+
+"""
+Autogenerated input type of MarkProjectV2AsTemplate
+"""
+input MarkProjectV2AsTemplateInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the Project to mark as a template.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+}
+
+"""
+Autogenerated return type of MarkProjectV2AsTemplate
+"""
+type MarkProjectV2AsTemplatePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The project.
+  """
+  projectV2: ProjectV2
+}
+
+"""
+Autogenerated input type of MarkPullRequestReadyForReview
+"""
+input MarkPullRequestReadyForReviewInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the pull request to be marked as ready for review.
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+}
+
+"""
+Autogenerated return type of MarkPullRequestReadyForReview
+"""
+type MarkPullRequestReadyForReviewPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The pull request that is ready for review.
+  """
+  pullRequest: PullRequest
+}
+
+"""
+Represents a 'marked_as_duplicate' event on a given issue or pull request.
+"""
+type MarkedAsDuplicateEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  The authoritative issue or pull request which has been duplicated by another.
+  """
+  canonical: IssueOrPullRequest
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The issue or pull request which has been marked as a duplicate of another.
+  """
+  duplicate: IssueOrPullRequest
+
+  """
+  The Node ID of the MarkedAsDuplicateEvent object
+  """
+  id: ID!
+
+  """
+  Canonical and duplicate belong to different repositories.
+  """
+  isCrossRepository: Boolean!
+}
+
+"""
+A public description of a Marketplace category.
+"""
+type MarketplaceCategory implements Node {
+  """
+  The category's description.
+  """
+  description: String
+
+  """
+  The technical description of how apps listed in this category work with GitHub.
+  """
+  howItWorks: String
+
+  """
+  The Node ID of the MarketplaceCategory object
+  """
+  id: ID!
+
+  """
+  The category's name.
+  """
+  name: String!
+
+  """
+  How many Marketplace listings have this as their primary category.
+  """
+  primaryListingCount: Int!
+
+  """
+  The HTTP path for this Marketplace category.
+  """
+  resourcePath: URI!
+
+  """
+  How many Marketplace listings have this as their secondary category.
+  """
+  secondaryListingCount: Int!
+
+  """
+  The short name of the category used in its URL.
+  """
+  slug: String!
+
+  """
+  The HTTP URL for this Marketplace category.
+  """
+  url: URI!
+}
+
+"""
+A listing in the GitHub integration marketplace.
+"""
+type MarketplaceListing implements Node {
+  """
+  The GitHub App this listing represents.
+  """
+  app: App
+
+  """
+  URL to the listing owner's company site.
+  """
+  companyUrl: URI
+
+  """
+  The HTTP path for configuring access to the listing's integration or OAuth app
+  """
+  configurationResourcePath: URI!
+
+  """
+  The HTTP URL for configuring access to the listing's integration or OAuth app
+  """
+  configurationUrl: URI!
+
+  """
+  URL to the listing's documentation.
+  """
+  documentationUrl: URI
+
+  """
+  The listing's detailed description.
+  """
+  extendedDescription: String
+
+  """
+  The listing's detailed description rendered to HTML.
+  """
+  extendedDescriptionHTML: HTML!
+
+  """
+  The listing's introductory description.
+  """
+  fullDescription: String!
+
+  """
+  The listing's introductory description rendered to HTML.
+  """
+  fullDescriptionHTML: HTML!
+
+  """
+  Does this listing have any plans with a free trial?
+  """
+  hasPublishedFreeTrialPlans: Boolean!
+
+  """
+  Does this listing have a terms of service link?
+  """
+  hasTermsOfService: Boolean!
+
+  """
+  Whether the creator of the app is a verified org
+  """
+  hasVerifiedOwner: Boolean!
+
+  """
+  A technical description of how this app works with GitHub.
+  """
+  howItWorks: String
+
+  """
+  The listing's technical description rendered to HTML.
+  """
+  howItWorksHTML: HTML!
+
+  """
+  The Node ID of the MarketplaceListing object
+  """
+  id: ID!
+
+  """
+  URL to install the product to the viewer's account or organization.
+  """
+  installationUrl: URI
+
+  """
+  Whether this listing's app has been installed for the current viewer
+  """
+  installedForViewer: Boolean!
+
+  """
+  Whether this listing has been removed from the Marketplace.
+  """
+  isArchived: Boolean!
+
+  """
+  Whether this listing is still an editable draft that has not been submitted
+  for review and is not publicly visible in the Marketplace.
+  """
+  isDraft: Boolean!
+
+  """
+  Whether the product this listing represents is available as part of a paid plan.
+  """
+  isPaid: Boolean!
+
+  """
+  Whether this listing has been approved for display in the Marketplace.
+  """
+  isPublic: Boolean!
+
+  """
+  Whether this listing has been rejected by GitHub for display in the Marketplace.
+  """
+  isRejected: Boolean!
+
+  """
+  Whether this listing has been approved for unverified display in the Marketplace.
+  """
+  isUnverified: Boolean!
+
+  """
+  Whether this draft listing has been submitted for review for approval to be unverified in the Marketplace.
+  """
+  isUnverifiedPending: Boolean!
+
+  """
+  Whether this draft listing has been submitted for review from GitHub for approval to be verified in the Marketplace.
+  """
+  isVerificationPendingFromDraft: Boolean!
+
+  """
+  Whether this unverified listing has been submitted for review from GitHub for approval to be verified in the Marketplace.
+  """
+  isVerificationPendingFromUnverified: Boolean!
+
+  """
+  Whether this listing has been approved for verified display in the Marketplace.
+  """
+  isVerified: Boolean!
+
+  """
+  The hex color code, without the leading '#', for the logo background.
+  """
+  logoBackgroundColor: String!
+
+  """
+  URL for the listing's logo image.
+  """
+  logoUrl(
+    """
+    The size in pixels of the resulting square image.
+    """
+    size: Int = 400
+  ): URI
+
+  """
+  The listing's full name.
+  """
+  name: String!
+
+  """
+  The listing's very short description without a trailing period or ampersands.
+  """
+  normalizedShortDescription: String!
+
+  """
+  URL to the listing's detailed pricing.
+  """
+  pricingUrl: URI
+
+  """
+  The category that best describes the listing.
+  """
+  primaryCategory: MarketplaceCategory!
+
+  """
+  URL to the listing's privacy policy; may return an empty string for listings that do not require a privacy policy URL.
+  """
+  privacyPolicyUrl: URI!
+
+  """
+  The HTTP path for the Marketplace listing.
+  """
+  resourcePath: URI!
+
+  """
+  The URLs for the listing's screenshots.
+  """
+  screenshotUrls: [String]!
+
+  """
+  An alternate category that describes the listing.
+  """
+  secondaryCategory: MarketplaceCategory
+
+  """
+  The listing's very short description.
+  """
+  shortDescription: String!
+
+  """
+  The short name of the listing used in its URL.
+  """
+  slug: String!
+
+  """
+  URL to the listing's status page.
+  """
+  statusUrl: URI
+
+  """
+  An email address for support for this listing's app.
+  """
+  supportEmail: String
+
+  """
+  Either a URL or an email address for support for this listing's app; may
+  return an empty string for listings that do not require a support URL.
+  """
+  supportUrl: URI!
+
+  """
+  URL to the listing's terms of service.
+  """
+  termsOfServiceUrl: URI
+
+  """
+  The HTTP URL for the Marketplace listing.
+  """
+  url: URI!
+
+  """
+  Can the current viewer add plans for this Marketplace listing.
+  """
+  viewerCanAddPlans: Boolean!
+
+  """
+  Can the current viewer approve this Marketplace listing.
+  """
+  viewerCanApprove: Boolean!
+
+  """
+  Can the current viewer delist this Marketplace listing.
+  """
+  viewerCanDelist: Boolean!
+
+  """
+  Can the current viewer edit this Marketplace listing.
+  """
+  viewerCanEdit: Boolean!
+
+  """
+  Can the current viewer edit the primary and secondary category of this
+  Marketplace listing.
+  """
+  viewerCanEditCategories: Boolean!
+
+  """
+  Can the current viewer edit the plans for this Marketplace listing.
+  """
+  viewerCanEditPlans: Boolean!
+
+  """
+  Can the current viewer return this Marketplace listing to draft state
+  so it becomes editable again.
+  """
+  viewerCanRedraft: Boolean!
+
+  """
+  Can the current viewer reject this Marketplace listing by returning it to
+  an editable draft state or rejecting it entirely.
+  """
+  viewerCanReject: Boolean!
+
+  """
+  Can the current viewer request this listing be reviewed for display in
+  the Marketplace as verified.
+  """
+  viewerCanRequestApproval: Boolean!
+
+  """
+  Indicates whether the current user has an active subscription to this Marketplace listing.
+  """
+  viewerHasPurchased: Boolean!
+
+  """
+  Indicates if the current user has purchased a subscription to this Marketplace listing
+  for all of the organizations the user owns.
+  """
+  viewerHasPurchasedForAllOrganizations: Boolean!
+
+  """
+  Does the current viewer role allow them to administer this Marketplace listing.
+  """
+  viewerIsListingAdmin: Boolean!
+}
+
+"""
+Look up Marketplace Listings
+"""
+type MarketplaceListingConnection {
+  """
+  A list of edges.
+  """
+  edges: [MarketplaceListingEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [MarketplaceListing]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type MarketplaceListingEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: MarketplaceListing
+}
+
+"""
+Represents a member feature request notification
+"""
+type MemberFeatureRequestNotification implements Node {
+  """
+  Represents the member feature request body, containing the organization name and the number of feature requests.
+  """
+  body: String!
+
+  """
+  The Node ID of the MemberFeatureRequestNotification object
+  """
+  id: ID!
+
+  """
+  Represents the member feature request notification title.
+  """
+  title: String!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+Entities that have members who can set status messages.
+"""
+interface MemberStatusable {
+  """
+  Get the status messages members of this entity have set that are either public or visible only to the organization.
+  """
+  memberStatuses(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for user statuses returned from the connection.
+    """
+    orderBy: UserStatusOrder = {field: UPDATED_AT, direction: DESC}
+  ): UserStatusConnection!
+}
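+
+# A hypothetical query paginating through member statuses, assuming
+# Organization implements MemberStatusable and UserStatus exposes a `message`
+# field; the login is a placeholder:
+#
+#   query {
+#     organization(login: "example-org") {
+#       memberStatuses(first: 10, orderBy: {field: UPDATED_AT, direction: DESC}) {
+#         nodes {
+#           message
+#         }
+#         pageInfo {
+#           endCursor
+#           hasNextPage
+#         }
+#       }
+#     }
+#   }
+#
+# Subsequent pages would pass the returned endCursor as the `after` argument.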
+
+"""
+Audit log entry for a members_can_delete_repos.clear event.
+"""
+type MembersCanDeleteReposClearAuditEntry implements AuditEntry & EnterpriseAuditEntryData & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The HTTP path for this enterprise.
+  """
+  enterpriseResourcePath: URI
+
+  """
+  The slug of the enterprise.
+  """
+  enterpriseSlug: String
+
+  """
+  The HTTP URL for this enterprise.
+  """
+  enterpriseUrl: URI
+
+  """
+  The Node ID of the MembersCanDeleteReposClearAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a members_can_delete_repos.disable event.
+"""
+type MembersCanDeleteReposDisableAuditEntry implements AuditEntry & EnterpriseAuditEntryData & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The HTTP path for this enterprise.
+  """
+  enterpriseResourcePath: URI
+
+  """
+  The slug of the enterprise.
+  """
+  enterpriseSlug: String
+
+  """
+  The HTTP URL for this enterprise.
+  """
+  enterpriseUrl: URI
+
+  """
+  The Node ID of the MembersCanDeleteReposDisableAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a members_can_delete_repos.enable event.
+"""
+type MembersCanDeleteReposEnableAuditEntry implements AuditEntry & EnterpriseAuditEntryData & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The HTTP path for this enterprise.
+  """
+  enterpriseResourcePath: URI
+
+  """
+  The slug of the enterprise.
+  """
+  enterpriseSlug: String
+
+  """
+  The HTTP URL for this enterprise.
+  """
+  enterpriseUrl: URI
+
+  """
+  The Node ID of the MembersCanDeleteReposEnableAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Represents a 'mentioned' event on a given issue or pull request.
+"""
+type MentionedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the MentionedEvent object
+  """
+  id: ID!
+}
+
+"""
+Autogenerated input type of MergeBranch
+"""
+input MergeBranchInput {
+  """
+  The email address to associate with this commit.
+  """
+  authorEmail: String
+
+  """
+  The name of the base branch that the provided head will be merged into.
+  """
+  base: String!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Message to use for the merge commit. If omitted, a default will be used.
+  """
+  commitMessage: String
+
+  """
+  The head to merge into the base branch. This can be a branch name or a commit GitObjectID.
+  """
+  head: String!
+
+  """
+  The Node ID of the Repository containing the base branch that will be modified.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of MergeBranch
+"""
+type MergeBranchPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The resulting merge Commit.
+  """
+  mergeCommit: Commit
+}
+
+"""
+The possible default commit messages for merges.
+"""
+enum MergeCommitMessage {
+  """
+  Default to a blank commit message.
+  """
+  BLANK
+
+  """
+  Default to the pull request's body.
+  """
+  PR_BODY
+
+  """
+  Default to the pull request's title.
+  """
+  PR_TITLE
+}
+
+"""
+The possible default commit titles for merges.
+"""
+enum MergeCommitTitle {
+  """
+  Default to the classic title for a merge message (e.g., Merge pull request #123 from branch-name).
+  """
+  MERGE_MESSAGE
+
+  """
+  Default to the pull request's title.
+  """
+  PR_TITLE
+}
+
+"""
+Autogenerated input type of MergePullRequest
+"""
+input MergePullRequestInput {
+  """
+  The email address to associate with this merge.
+  """
+  authorEmail: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Commit body to use for the merge commit; if omitted, a default message will be used.
+  """
+  commitBody: String
+
+  """
+  Commit headline to use for the merge commit; if omitted, a default message will be used.
+  """
+  commitHeadline: String
+
+  """
+  OID that the pull request head ref must match to allow merge; if omitted, no check is performed.
+  """
+  expectedHeadOid: GitObjectID
+
+  """
+  The merge method to use. If omitted, defaults to 'MERGE'.
+  """
+  mergeMethod: PullRequestMergeMethod = MERGE
+
+  """
+  ID of the pull request to be merged.
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+}
+
+"""
+Autogenerated return type of MergePullRequest
+"""
+type MergePullRequestPayload {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The pull request that was merged.
+  """
+  pullRequest: PullRequest
+}
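+
+# A hypothetical invocation, assuming the corresponding `mergePullRequest`
+# field on Mutation (not shown in this excerpt) and that SQUASH is a
+# PullRequestMergeMethod value; the node ID is a placeholder:
+#
+#   mutation {
+#     mergePullRequest(input: {
+#       pullRequestId: "PR_exampleNodeId"
+#       mergeMethod: SQUASH
+#       commitHeadline: "Merge feature branch"
+#     }) {
+#       pullRequest {
+#         id
+#       }
+#     }
+#   }
+#
+# Supplying expectedHeadOid makes the merge conditional on the head ref still
+# pointing at that commit, guarding against concurrent pushes.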
+
+"""
+The queue of pull request entries to be merged into a protected branch in a repository.
+"""
+type MergeQueue implements Node {
+  """
+  The configuration for this merge queue
+  """
+  configuration: MergeQueueConfiguration
+
+  """
+  The entries in the queue
+  """
+  entries(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): MergeQueueEntryConnection
+
+  """
+  The Node ID of the MergeQueue object
+  """
+  id: ID!
+
+  """
+  The estimated time in seconds until a newly added entry would be merged
+  """
+  nextEntryEstimatedTimeToMerge: Int
+
+  """
+  The repository this merge queue belongs to
+  """
+  repository: Repository
+
+  """
+  The HTTP path for this merge queue
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for this merge queue
+  """
+  url: URI!
+}
+
+"""
+Configuration for a MergeQueue
+"""
+type MergeQueueConfiguration {
+  """
+  The amount of time in minutes to wait for a check response before considering it a failure.
+  """
+  checkResponseTimeout: Int
+
+  """
+  The maximum number of entries to build at once.
+  """
+  maximumEntriesToBuild: Int
+
+  """
+  The maximum number of entries to merge at once.
+  """
+  maximumEntriesToMerge: Int
+
+  """
+  The merge method to use for this queue.
+  """
+  mergeMethod: PullRequestMergeMethod
+
+  """
+  The strategy to use when merging entries.
+  """
+  mergingStrategy: MergeQueueMergingStrategy
+
+  """
+  The minimum number of entries required to merge at once.
+  """
+  minimumEntriesToMerge: Int
+
+  """
+  The amount of time in minutes to wait before ignoring the minimum number of
+  entries required in the queue and merging a collection of entries.
+  """
+  minimumEntriesToMergeWaitTime: Int
+}
+
+"""
+Entries in a MergeQueue
+"""
+type MergeQueueEntry implements Node {
+  """
+  The base commit for this entry
+  """
+  baseCommit: Commit
+
+  """
+  The date and time this entry was added to the merge queue
+  """
+  enqueuedAt: DateTime!
+
+  """
+  The actor that enqueued this entry
+  """
+  enqueuer: Actor!
+
+  """
+  The estimated time in seconds until this entry will be merged
+  """
+  estimatedTimeToMerge: Int
+
+  """
+  The head commit for this entry
+  """
+  headCommit: Commit
+
+  """
+  The Node ID of the MergeQueueEntry object
+  """
+  id: ID!
+
+  """
+  Whether this pull request should jump the queue
+  """
+  jump: Boolean!
+
+  """
+  The merge queue that this entry belongs to
+  """
+  mergeQueue: MergeQueue
+
+  """
+  The position of this entry in the queue
+  """
+  position: Int!
+
+  """
+  The pull request that will be added to a merge group
+  """
+  pullRequest: PullRequest
+
+  """
+  Does this pull request need to be deployed on its own
+  """
+  solo: Boolean!
+
+  """
+  The state of this entry in the queue
+  """
+  state: MergeQueueEntryState!
+}
+
+"""
+The connection type for MergeQueueEntry.
+"""
+type MergeQueueEntryConnection {
+  """
+  A list of edges.
+  """
+  edges: [MergeQueueEntryEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [MergeQueueEntry]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type MergeQueueEntryEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: MergeQueueEntry
+}
+
+"""
+The possible states for a merge queue entry.
+"""
+enum MergeQueueEntryState {
+  """
+  The entry is currently waiting for checks to pass.
+  """
+  AWAITING_CHECKS
+
+  """
+  The entry is currently locked.
+  """
+  LOCKED
+
+  """
+  The entry is currently mergeable.
+  """
+  MERGEABLE
+
+  """
+  The entry is currently queued.
+  """
+  QUEUED
+
+  """
+  The entry is currently unmergeable.
+  """
+  UNMERGEABLE
+}
+
+"""
+The possible merging strategies for a merge queue.
+"""
+enum MergeQueueMergingStrategy {
+  """
+  Entries are only allowed to merge if they are passing.
+  """
+  ALLGREEN
+
+  """
+  Failing entries are allowed to merge if they are grouped with a passing entry.
+  """
+  HEADGREEN
+}
+
+"""
+Detailed status information about a pull request merge.
+"""
+enum MergeStateStatus {
+  """
+  The head ref is out of date.
+  """
+  BEHIND
+
+  """
+  The merge is blocked.
+  """
+  BLOCKED
+
+  """
+  Mergeable and passing commit status.
+  """
+  CLEAN
+
+  """
+  The merge commit cannot be cleanly created.
+  """
+  DIRTY
+
+  """
+  The merge is blocked due to the pull request being a draft.
+  """
+  DRAFT
+    @deprecated(
+      reason: "DRAFT state will be removed from this enum and `isDraft` should be used instead Use PullRequest.isDraft instead. Removal on 2021-01-01 UTC."
+    )
+
+  """
+  Mergeable with passing commit status and pre-receive hooks.
+  """
+  HAS_HOOKS
+
+  """
+  The state cannot currently be determined.
+  """
+  UNKNOWN
+
+  """
+  Mergeable with non-passing commit status.
+  """
+  UNSTABLE
+}
+
+"""
+Whether or not a PullRequest can be merged.
+"""
+enum MergeableState {
+  """
+  The pull request cannot be merged due to merge conflicts.
+  """
+  CONFLICTING
+
+  """
+  The pull request can be merged.
+  """
+  MERGEABLE
+
+  """
+  The mergeability of the pull request is still being calculated.
+  """
+  UNKNOWN
+}
+
+"""
+Represents a 'merged' event on a given pull request.
+"""
+type MergedEvent implements Node & UniformResourceLocatable {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the commit associated with the `merge` event.
+  """
+  commit: Commit
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the MergedEvent object
+  """
+  id: ID!
+
+  """
+  Identifies the Ref associated with the `merge` event.
+  """
+  mergeRef: Ref
+
+  """
+  Identifies the name of the Ref associated with the `merge` event.
+  """
+  mergeRefName: String!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+
+  """
+  The HTTP path for this merged event.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for this merged event.
+  """
+  url: URI!
+}
+
+"""
+Represents a GitHub Enterprise Importer (GEI) migration.
+"""
+interface Migration {
+  """
+  The migration flag to continue on error.
+  """
+  continueOnError: Boolean!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: String
+
+  """
+  The reason the migration failed.
+  """
+  failureReason: String
+
+  """
+  The Node ID of the Migration object
+  """
+  id: ID!
+
+  """
+  The URL for the migration log (expires 1 day after migration completes).
+  """
+  migrationLogUrl: URI
+
+  """
+  The migration source.
+  """
+  migrationSource: MigrationSource!
+
+  """
+  The target repository name.
+  """
+  repositoryName: String!
+
+  """
+  The migration source URL, for example `https://github.com` or `https://monalisa.ghe.com`.
+  """
+  sourceUrl: URI!
+
+  """
+  The migration state.
+  """
+  state: MigrationState!
+
+  """
+  The number of warnings encountered for this migration. To review the warnings,
+  check the [Migration Log](https://docs.github.com/en/migrations/using-github-enterprise-importer/completing-your-migration-with-github-enterprise-importer/accessing-your-migration-logs-for-github-enterprise-importer).
+  """
+  warningsCount: Int!
+}
+
+"""
+A GitHub Enterprise Importer (GEI) migration source.
+"""
+type MigrationSource implements Node {
+  """
+  The Node ID of the MigrationSource object
+  """
+  id: ID!
+
+  """
+  The migration source name.
+  """
+  name: String!
+
+  """
+  The migration source type.
+  """
+  type: MigrationSourceType!
+
+  """
+  The migration source URL, for example `https://github.com` or `https://monalisa.ghe.com`.
+  """
+  url: URI!
+}
+
+"""
+Represents the different GitHub Enterprise Importer (GEI) migration sources.
+"""
+enum MigrationSourceType {
+  """
+  An Azure DevOps migration source.
+  """
+  AZURE_DEVOPS
+
+  """
+  A Bitbucket Server migration source.
+  """
+  BITBUCKET_SERVER
+
+  """
+  A GitHub Migration API source.
+  """
+  GITHUB_ARCHIVE
+}
+
+"""
+The GitHub Enterprise Importer (GEI) migration state.
+"""
+enum MigrationState {
+  """
+  The migration has failed.
+  """
+  FAILED
+
+  """
+  The migration has invalid credentials.
+  """
+  FAILED_VALIDATION
+
+  """
+  The migration is in progress.
+  """
+  IN_PROGRESS
+
+  """
+  The migration has not started.
+  """
+  NOT_STARTED
+
+  """
+  The migration needs to have its credentials validated.
+  """
+  PENDING_VALIDATION
+
+  """
+  The migration has been queued.
+  """
+  QUEUED
+
+  """
+  The migration has succeeded.
+  """
+  SUCCEEDED
+}
+
+"""
+Represents a Milestone object on a given repository.
+"""
+type Milestone implements Closable & Node & UniformResourceLocatable {
+  """
+  Indicates if the object is closed (definition of closed may depend on type)
+  """
+  closed: Boolean!
+
+  """
+  Identifies the date and time when the object was closed.
+  """
+  closedAt: DateTime
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the actor who created the milestone.
+  """
+  creator: Actor
+
+  """
+  Identifies the description of the milestone.
+  """
+  description: String
+
+  """
+  Identifies the due date of the milestone.
+  """
+  dueOn: DateTime
+
+  """
+  The Node ID of the Milestone object
+  """
+  id: ID!
+
+  """
+  A list of issues associated with the milestone.
+  """
+  issues(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Filtering options for issues returned from the connection.
+    """
+    filterBy: IssueFilters
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    A list of label names to filter the pull requests by.
+    """
+    labels: [String!]
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for issues returned from the connection.
+    """
+    orderBy: IssueOrder
+
+    """
+    A list of states to filter the issues by.
+    """
+    states: [IssueState!]
+  ): IssueConnection!
+
+  """
+  Identifies the number of the milestone.
+  """
+  number: Int!
+
+  """
+  Identifies the percentage complete for the milestone
+  """
+  progressPercentage: Float!
+
+  """
+  A list of pull requests associated with the milestone.
+  """
+  pullRequests(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    The base ref name to filter the pull requests by.
+    """
+    baseRefName: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    The head ref name to filter the pull requests by.
+    """
+    headRefName: String
+
+    """
+    A list of label names to filter the pull requests by.
+    """
+    labels: [String!]
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for pull requests returned from the connection.
+    """
+    orderBy: IssueOrder
+
+    """
+    A list of states to filter the pull requests by.
+    """
+    states: [PullRequestState!]
+  ): PullRequestConnection!
+
+  """
+  The repository associated with this milestone.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path for this milestone
+  """
+  resourcePath: URI!
+
+  """
+  Identifies the state of the milestone.
+  """
+  state: MilestoneState!
+
+  """
+  Identifies the title of the milestone.
+  """
+  title: String!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this milestone
+  """
+  url: URI!
+
+  """
+  Indicates if the object can be closed by the viewer.
+  """
+  viewerCanClose: Boolean!
+
+  """
+  Indicates if the object can be reopened by the viewer.
+  """
+  viewerCanReopen: Boolean!
+}
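+
+# A hypothetical query against the filtered issues connection above, assuming
+# a `milestone(number:)` field on Repository and a `title` field on Issue;
+# owner, name, and the milestone number are placeholders:
+#
+#   query {
+#     repository(owner: "example-owner", name: "example-repo") {
+#       milestone(number: 1) {
+#         title
+#         progressPercentage
+#         issues(first: 20, states: [OPEN], labels: ["bug"]) {
+#           totalCount
+#           nodes {
+#             title
+#           }
+#         }
+#       }
+#     }
+#   }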
+
+"""
+The connection type for Milestone.
+"""
+type MilestoneConnection {
+  """
+  A list of edges.
+  """
+  edges: [MilestoneEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Milestone]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type MilestoneEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Milestone
+}
+
+"""
+Types that can be inside a Milestone.
+"""
+union MilestoneItem = Issue | PullRequest
+
+"""
+Ordering options for milestone connections.
+"""
+input MilestoneOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order milestones by.
+  """
+  field: MilestoneOrderField!
+}
+
+"""
+Properties by which milestone connections can be ordered.
+"""
+enum MilestoneOrderField {
+  """
+  Order milestones by when they were created.
+  """
+  CREATED_AT
+
+  """
+  Order milestones by when they are due.
+  """
+  DUE_DATE
+
+  """
+  Order milestones by their number.
+  """
+  NUMBER
+
+  """
+  Order milestones by when they were last updated.
+  """
+  UPDATED_AT
+}
+
+"""
+The possible states of a milestone.
+"""
+enum MilestoneState {
+  """
+  A milestone that has been closed.
+  """
+  CLOSED
+
+  """
+  A milestone that is still open.
+  """
+  OPEN
+}
+
+"""
+Represents a 'milestoned' event on a given issue or pull request.
+"""
+type MilestonedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the MilestonedEvent object
+  """
+  id: ID!
+
+  """
+  Identifies the milestone title associated with the 'milestoned' event.
+  """
+  milestoneTitle: String!
+
+  """
+  Object referenced by event.
+  """
+  subject: MilestoneItem!
+}
+
+"""
+Entities that can be minimized.
+"""
+interface Minimizable {
+  """
+  Returns whether or not a comment has been minimized.
+  """
+  isMinimized: Boolean!
+
+  """
+  Returns why the comment was minimized. One of `abuse`, `off-topic`,
+  `outdated`, `resolved`, `duplicate` and `spam`. Note that the case and
+  formatting of these values differs from the inputs to the `MinimizeComment` mutation.
+  """
+  minimizedReason: String
+
+  """
+  Check if the current viewer can minimize this object.
+  """
+  viewerCanMinimize: Boolean!
+}
+
+"""
+Autogenerated input type of MinimizeComment
+"""
+input MinimizeCommentInput {
+  """
+  The classification of comment
+  """
+  classifier: ReportedContentClassifiers!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the subject to modify.
+  """
+  subjectId: ID!
+    @possibleTypes(
+      concreteTypes: [
+        "CommitComment"
+        "DiscussionComment"
+        "GistComment"
+        "IssueComment"
+        "PullRequestReview"
+        "PullRequestReviewComment"
+      ]
+      abstractType: "Minimizable"
+    )
+}
+
+"""
+Autogenerated return type of MinimizeComment
+"""
+type MinimizeCommentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The comment that was minimized.
+  """
+  minimizedComment: Minimizable
+}
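+
+# A hypothetical invocation, assuming the corresponding `minimizeComment`
+# field on Mutation and that OFF_TOPIC is a ReportedContentClassifiers value;
+# the node ID is a placeholder. Note how the enum classifier maps to the
+# lower-case minimizedReason string ("off-topic") documented on Minimizable:
+#
+#   mutation {
+#     minimizeComment(input: {subjectId: "IC_exampleNodeId", classifier: OFF_TOPIC}) {
+#       minimizedComment {
+#         isMinimized
+#         minimizedReason
+#       }
+#     }
+#   }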
+
+"""
+Autogenerated input type of MoveProjectCard
+"""
+input MoveProjectCardInput {
+  """
+  Place the new card after the card with this id. Pass null to place it at the top.
+  """
+  afterCardId: ID @possibleTypes(concreteTypes: ["ProjectCard"])
+
+  """
+  The id of the card to move.
+  """
+  cardId: ID! @possibleTypes(concreteTypes: ["ProjectCard"])
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The id of the column to move it into.
+  """
+  columnId: ID! @possibleTypes(concreteTypes: ["ProjectColumn"])
+}
+
+"""
+Autogenerated return type of MoveProjectCard
+"""
+type MoveProjectCardPayload {
+  """
+  The new edge of the moved card.
+  """
+  cardEdge: ProjectCardEdge
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of MoveProjectColumn
+"""
+input MoveProjectColumnInput {
+  """
+  Place the new column after the column with this id. Pass null to place it at the front.
+  """
+  afterColumnId: ID @possibleTypes(concreteTypes: ["ProjectColumn"])
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The id of the column to move.
+  """
+  columnId: ID! @possibleTypes(concreteTypes: ["ProjectColumn"])
+}
+
+"""
+Autogenerated return type of MoveProjectColumn
+"""
+type MoveProjectColumnPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new edge of the moved column.
+  """
+  columnEdge: ProjectColumnEdge
+}
+
+"""
+Represents a 'moved_columns_in_project' event on a given issue or pull request.
+"""
+type MovedColumnsInProjectEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the MovedColumnsInProjectEvent object
+  """
+  id: ID!
+
+  """
+  Column name the issue or pull request was moved from.
+  """
+  previousProjectColumnName: String! @preview(toggledBy: "starfox-preview")
+
+  """
+  Project referenced by event.
+  """
+  project: Project @preview(toggledBy: "starfox-preview")
+
+  """
+  Project card referenced by this project event.
+  """
+  projectCard: ProjectCard @preview(toggledBy: "starfox-preview")
+
+  """
+  Column name the issue or pull request was moved to.
+  """
+  projectColumnName: String! @preview(toggledBy: "starfox-preview")
+}
+
+"""
+The root type for implementing GraphQL mutations.
+"""
+type Mutation {
+  """
+  Clear all of a customer's queued migrations
+  """
+  abortQueuedMigrations(
+    """
+    Parameters for AbortQueuedMigrations
+    """
+    input: AbortQueuedMigrationsInput!
+  ): AbortQueuedMigrationsPayload
+
+  """
+  Abort a repository migration queued or in progress.
+  """
+  abortRepositoryMigration(
+    """
+    Parameters for AbortRepositoryMigration
+    """
+    input: AbortRepositoryMigrationInput!
+  ): AbortRepositoryMigrationPayload
+
+  """
+  Accepts a pending invitation for a user to become an administrator of an enterprise.
+  """
+  acceptEnterpriseAdministratorInvitation(
+    """
+    Parameters for AcceptEnterpriseAdministratorInvitation
+    """
+    input: AcceptEnterpriseAdministratorInvitationInput!
+  ): AcceptEnterpriseAdministratorInvitationPayload
+
+  """
+  Applies a suggested topic to the repository.
+  """
+  acceptTopicSuggestion(
+    """
+    Parameters for AcceptTopicSuggestion
+    """
+    input: AcceptTopicSuggestionInput!
+  ): AcceptTopicSuggestionPayload
+
+  """
+  Adds assignees to an assignable object.
+  """
+  addAssigneesToAssignable(
+    """
+    Parameters for AddAssigneesToAssignable
+    """
+    input: AddAssigneesToAssignableInput!
+  ): AddAssigneesToAssignablePayload
+
+  """
+  Adds a comment to an Issue or Pull Request.
+  """
+  addComment(
+    """
+    Parameters for AddComment
+    """
+    input: AddCommentInput!
+  ): AddCommentPayload
+
+  """
+  Adds a comment to a Discussion, possibly as a reply to another comment.
+  """
+  addDiscussionComment(
+    """
+    Parameters for AddDiscussionComment
+    """
+    input: AddDiscussionCommentInput!
+  ): AddDiscussionCommentPayload
+
+  """
+  Vote for an option in a discussion poll.
+  """
+  addDiscussionPollVote(
+    """
+    Parameters for AddDiscussionPollVote
+    """
+    input: AddDiscussionPollVoteInput!
+  ): AddDiscussionPollVotePayload
+
+  """
+  Adds enterprise members to an organization within the enterprise.
+  """
+  addEnterpriseOrganizationMember(
+    """
+    Parameters for AddEnterpriseOrganizationMember
+    """
+    input: AddEnterpriseOrganizationMemberInput!
+  ): AddEnterpriseOrganizationMemberPayload
+
+  """
+  Adds a support entitlement to an enterprise member.
+  """
+  addEnterpriseSupportEntitlement(
+    """
+    Parameters for AddEnterpriseSupportEntitlement
+    """
+    input: AddEnterpriseSupportEntitlementInput!
+  ): AddEnterpriseSupportEntitlementPayload
+
+  """
+  Adds labels to a labelable object.
+  """
+  addLabelsToLabelable(
+    """
+    Parameters for AddLabelsToLabelable
+    """
+    input: AddLabelsToLabelableInput!
+  ): AddLabelsToLabelablePayload
+
+  """
+  Adds a card to a ProjectColumn. Either `contentId` or `note` must be provided but **not** both.
+  """
+  addProjectCard(
+    """
+    Parameters for AddProjectCard
+    """
+    input: AddProjectCardInput!
+  ): AddProjectCardPayload
+
+  """
+  Adds a column to a Project.
+  """
+  addProjectColumn(
+    """
+    Parameters for AddProjectColumn
+    """
+    input: AddProjectColumnInput!
+  ): AddProjectColumnPayload
+
+  """
+  Creates a new draft issue and adds it to a Project.
+  """
+  addProjectV2DraftIssue(
+    """
+    Parameters for AddProjectV2DraftIssue
+    """
+    input: AddProjectV2DraftIssueInput!
+  ): AddProjectV2DraftIssuePayload
+
+  """
+  Links an existing content instance to a Project.
+  """
+  addProjectV2ItemById(
+    """
+    Parameters for AddProjectV2ItemById
+    """
+    input: AddProjectV2ItemByIdInput!
+  ): AddProjectV2ItemByIdPayload
+
+  """
+  Adds a review to a Pull Request.
+  """
+  addPullRequestReview(
+    """
+    Parameters for AddPullRequestReview
+    """
+    input: AddPullRequestReviewInput!
+  ): AddPullRequestReviewPayload
+
+  """
+  Adds a comment to a review.
+  """
+  addPullRequestReviewComment(
+    """
+    Parameters for AddPullRequestReviewComment
+    """
+    input: AddPullRequestReviewCommentInput!
+  ): AddPullRequestReviewCommentPayload
+
+  """
+  Adds a new thread to a pending Pull Request Review.
+  """
+  addPullRequestReviewThread(
+    """
+    Parameters for AddPullRequestReviewThread
+    """
+    input: AddPullRequestReviewThreadInput!
+  ): AddPullRequestReviewThreadPayload
+
+  """
+  Adds a reply to an existing Pull Request Review Thread.
+  """
+  addPullRequestReviewThreadReply(
+    """
+    Parameters for AddPullRequestReviewThreadReply
+    """
+    input: AddPullRequestReviewThreadReplyInput!
+  ): AddPullRequestReviewThreadReplyPayload
+
+  """
+  Adds a reaction to a subject.
+  """
+  addReaction(
+    """
+    Parameters for AddReaction
+    """
+    input: AddReactionInput!
+  ): AddReactionPayload
+
+  """
+  Adds a star to a Starrable.
+  """
+  addStar(
+    """
+    Parameters for AddStar
+    """
+    input: AddStarInput!
+  ): AddStarPayload
+
+  """
+  Add an upvote to a discussion or discussion comment.
+  """
+  addUpvote(
+    """
+    Parameters for AddUpvote
+    """
+    input: AddUpvoteInput!
+  ): AddUpvotePayload
+
+  """
+  Adds a verifiable domain to an owning account.
+  """
+  addVerifiableDomain(
+    """
+    Parameters for AddVerifiableDomain
+    """
+    input: AddVerifiableDomainInput!
+  ): AddVerifiableDomainPayload
+
+  """
+  Approve all pending deployments under one or more environments
+  """
+  approveDeployments(
+    """
+    Parameters for ApproveDeployments
+    """
+    input: ApproveDeploymentsInput!
+  ): ApproveDeploymentsPayload
+
+  """
+  Approve a verifiable domain for notification delivery.
+  """
+  approveVerifiableDomain(
+    """
+    Parameters for ApproveVerifiableDomain
+    """
+    input: ApproveVerifiableDomainInput!
+  ): ApproveVerifiableDomainPayload
+
+  """
+  Archives a ProjectV2Item
+  """
+  archiveProjectV2Item(
+    """
+    Parameters for ArchiveProjectV2Item
+    """
+    input: ArchiveProjectV2ItemInput!
+  ): ArchiveProjectV2ItemPayload
+
+  """
+  Marks a repository as archived.
+  """
+  archiveRepository(
+    """
+    Parameters for ArchiveRepository
+    """
+    input: ArchiveRepositoryInput!
+  ): ArchiveRepositoryPayload
+
+  """
+  Cancels a pending invitation for an administrator to join an enterprise.
+  """
+  cancelEnterpriseAdminInvitation(
+    """
+    Parameters for CancelEnterpriseAdminInvitation
+    """
+    input: CancelEnterpriseAdminInvitationInput!
+  ): CancelEnterpriseAdminInvitationPayload
+
+  """
+  Cancel an active sponsorship.
+  """
+  cancelSponsorship(
+    """
+    Parameters for CancelSponsorship
+    """
+    input: CancelSponsorshipInput!
+  ): CancelSponsorshipPayload
+
+  """
+  Update your status on GitHub.
+  """
+  changeUserStatus(
+    """
+    Parameters for ChangeUserStatus
+    """
+    input: ChangeUserStatusInput!
+  ): ChangeUserStatusPayload
+
+  """
+  Clears all labels from a labelable object.
+  """
+  clearLabelsFromLabelable(
+    """
+    Parameters for ClearLabelsFromLabelable
+    """
+    input: ClearLabelsFromLabelableInput!
+  ): ClearLabelsFromLabelablePayload
+
+  """
+  This mutation clears the value of a field for an item in a Project. Currently
+  only text, number, date, assignees, labels, single-select, iteration and
+  milestone fields are supported.
+  """
+  clearProjectV2ItemFieldValue(
+    """
+    Parameters for ClearProjectV2ItemFieldValue
+    """
+    input: ClearProjectV2ItemFieldValueInput!
+  ): ClearProjectV2ItemFieldValuePayload
+
+  """
+  Creates a new project by cloning configuration from an existing project.
+  """
+  cloneProject(
+    """
+    Parameters for CloneProject
+    """
+    input: CloneProjectInput!
+  ): CloneProjectPayload
+
+  """
+  Create a new repository with the same files and directory structure as a template repository.
+  """
+  cloneTemplateRepository(
+    """
+    Parameters for CloneTemplateRepository
+    """
+    input: CloneTemplateRepositoryInput!
+  ): CloneTemplateRepositoryPayload
+
+  """
+  Close a discussion.
+  """
+  closeDiscussion(
+    """
+    Parameters for CloseDiscussion
+    """
+    input: CloseDiscussionInput!
+  ): CloseDiscussionPayload
+
+  """
+  Close an issue.
+  """
+  closeIssue(
+    """
+    Parameters for CloseIssue
+    """
+    input: CloseIssueInput!
+  ): CloseIssuePayload
+
+  """
+  Close a pull request.
+  """
+  closePullRequest(
+    """
+    Parameters for ClosePullRequest
+    """
+    input: ClosePullRequestInput!
+  ): ClosePullRequestPayload
+
+  """
+  Convert a project note card to one associated with a newly created issue.
+  """
+  convertProjectCardNoteToIssue(
+    """
+    Parameters for ConvertProjectCardNoteToIssue
+    """
+    input: ConvertProjectCardNoteToIssueInput!
+  ): ConvertProjectCardNoteToIssuePayload
+
+  """
+  Converts a pull request to draft
+  """
+  convertPullRequestToDraft(
+    """
+    Parameters for ConvertPullRequestToDraft
+    """
+    input: ConvertPullRequestToDraftInput!
+  ): ConvertPullRequestToDraftPayload
+
+  """
+  Copy a project.
+  """
+  copyProjectV2(
+    """
+    Parameters for CopyProjectV2
+    """
+    input: CopyProjectV2Input!
+  ): CopyProjectV2Payload
+
+  """
+  Invites a user to claim reattributable data
+  """
+  createAttributionInvitation(
+    """
+    Parameters for CreateAttributionInvitation
+    """
+    input: CreateAttributionInvitationInput!
+  ): CreateAttributionInvitationPayload
+
+  """
+  Create a new branch protection rule
+  """
+  createBranchProtectionRule(
+    """
+    Parameters for CreateBranchProtectionRule
+    """
+    input: CreateBranchProtectionRuleInput!
+  ): CreateBranchProtectionRulePayload
+
+  """
+  Create a check run.
+  """
+  createCheckRun(
+    """
+    Parameters for CreateCheckRun
+    """
+    input: CreateCheckRunInput!
+  ): CreateCheckRunPayload
+
+  """
+  Create a check suite
+  """
+  createCheckSuite(
+    """
+    Parameters for CreateCheckSuite
+    """
+    input: CreateCheckSuiteInput!
+  ): CreateCheckSuitePayload
+
+  """
+  Appends a commit to the given branch as the authenticated user.
+
+  This mutation creates a commit whose parent is the HEAD of the provided
+  branch and also updates that branch to point to the new commit.
+  It can be thought of as similar to `git commit`.
+
+  ### Locating a Branch
+
+  Commits are appended to a `branch` of type `Ref`.
+  This must refer to a git branch (i.e. the fully qualified path must
+  begin with `refs/heads/`), although including this prefix is optional.
+
+  Callers may specify the `branch` to commit to either by its global node
+  ID or by passing both of `repositoryNameWithOwner` and `refName`.  For
+  more details see the documentation for `CommittableBranch`.
+
+  ### Describing Changes
+
+  `fileChanges` are specified as a `FileChanges` object describing
+  `FileAdditions` and `FileDeletions`.
+
+  Please see the documentation for `FileChanges` for more information on
+  how to use this argument to describe any set of file changes.
+
+  ### Authorship
+
+  Similar to the web commit interface, this mutation does not support
+  specifying the author or committer of the commit and will not add
+  support for this in the future.
+
+  A commit created by a successful execution of this mutation will be
+  authored by the owner of the credential which authenticates the API
+  request.  The committer will be identical to that of commits authored
+  using the web interface.
+
+  If you need full control over author and committer information, please
+  use the Git Database REST API instead.
+
+  ### Commit Signing
+
+  Commits made using this mutation are automatically signed by GitHub if
+  supported and will be marked as verified in the user interface.
+  """
+  createCommitOnBranch(
+    """
+    Parameters for CreateCommitOnBranch
+    """
+    input: CreateCommitOnBranchInput!
+  ): CreateCommitOnBranchPayload
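+
+  # A comment-only usage sketch (not part of the schema): committing one
+  # file with createCommitOnBranch. The `branch` fields follow the
+  # `CommittableBranch` notes above; the `message`, `fileChanges`, and
+  # `expectedHeadOid` input shapes are assumptions from the public GitHub
+  # API, and the repository and path values are hypothetical.
+  #
+  # mutation {
+  #   createCommitOnBranch(input: {
+  #     branch: {repositoryNameWithOwner: "octocat/hello-world", refName: "refs/heads/main"}
+  #     message: {headline: "Add greeting"}
+  #     fileChanges: {additions: [{path: "docs/hello.txt", contents: "aGVsbG8K"}]}  # contents are base64
+  #     expectedHeadOid: "<current HEAD oid>"
+  #   }) {
+  #     commit { oid url }
+  #   }
+  # }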
+
+  """
+  Creates a new deployment event.
+  """
+  createDeployment(
+    """
+    Parameters for CreateDeployment
+    """
+    input: CreateDeploymentInput!
+  ): CreateDeploymentPayload @preview(toggledBy: "flash-preview")
+
+  """
+  Create a deployment status.
+  """
+  createDeploymentStatus(
+    """
+    Parameters for CreateDeploymentStatus
+    """
+    input: CreateDeploymentStatusInput!
+  ): CreateDeploymentStatusPayload @preview(toggledBy: "flash-preview")
+
+  """
+  Create a discussion.
+  """
+  createDiscussion(
+    """
+    Parameters for CreateDiscussion
+    """
+    input: CreateDiscussionInput!
+  ): CreateDiscussionPayload
+
+  """
+  Creates an organization as part of an enterprise account. A personal access
+  token used to create an organization is implicitly permitted to update the
+  organization it created, if the organization is part of an enterprise that has
+  SAML enabled or uses Enterprise Managed Users. If the organization is not part
+  of such an enterprise, and instead has SAML enabled for it individually, the
+  token will then require SAML authorization to continue working against that organization.
+  """
+  createEnterpriseOrganization(
+    """
+    Parameters for CreateEnterpriseOrganization
+    """
+    input: CreateEnterpriseOrganizationInput!
+  ): CreateEnterpriseOrganizationPayload
+
+  """
+  Creates an environment or simply returns it if it already exists.
+  """
+  createEnvironment(
+    """
+    Parameters for CreateEnvironment
+    """
+    input: CreateEnvironmentInput!
+  ): CreateEnvironmentPayload
+
+  """
+  Creates a new IP allow list entry.
+  """
+  createIpAllowListEntry(
+    """
+    Parameters for CreateIpAllowListEntry
+    """
+    input: CreateIpAllowListEntryInput!
+  ): CreateIpAllowListEntryPayload
+
+  """
+  Creates a new issue.
+  """
+  createIssue(
+    """
+    Parameters for CreateIssue
+    """
+    input: CreateIssueInput!
+  ): CreateIssuePayload
+
+  """
+  Creates a new label.
+  """
+  createLabel(
+    """
+    Parameters for CreateLabel
+    """
+    input: CreateLabelInput!
+  ): CreateLabelPayload @preview(toggledBy: "bane-preview")
+
+  """
+  Create a branch linked to an issue.
+  """
+  createLinkedBranch(
+    """
+    Parameters for CreateLinkedBranch
+    """
+    input: CreateLinkedBranchInput!
+  ): CreateLinkedBranchPayload
+
+  """
+  Creates a GitHub Enterprise Importer (GEI) migration source.
+  """
+  createMigrationSource(
+    """
+    Parameters for CreateMigrationSource
+    """
+    input: CreateMigrationSourceInput!
+  ): CreateMigrationSourcePayload
+
+  """
+  Creates a new project.
+  """
+  createProject(
+    """
+    Parameters for CreateProject
+    """
+    input: CreateProjectInput!
+  ): CreateProjectPayload
+
+  """
+  Creates a new project.
+  """
+  createProjectV2(
+    """
+    Parameters for CreateProjectV2
+    """
+    input: CreateProjectV2Input!
+  ): CreateProjectV2Payload
+
+  """
+  Create a new project field.
+  """
+  createProjectV2Field(
+    """
+    Parameters for CreateProjectV2Field
+    """
+    input: CreateProjectV2FieldInput!
+  ): CreateProjectV2FieldPayload
+
+  """
+  Create a new pull request
+  """
+  createPullRequest(
+    """
+    Parameters for CreatePullRequest
+    """
+    input: CreatePullRequestInput!
+  ): CreatePullRequestPayload
+
+  """
+  Create a new Git Ref.
+  """
+  createRef(
+    """
+    Parameters for CreateRef
+    """
+    input: CreateRefInput!
+  ): CreateRefPayload
+
+  """
+  Create a new repository.
+  """
+  createRepository(
+    """
+    Parameters for CreateRepository
+    """
+    input: CreateRepositoryInput!
+  ): CreateRepositoryPayload
+
+  """
+  Create a repository ruleset
+  """
+  createRepositoryRuleset(
+    """
+    Parameters for CreateRepositoryRuleset
+    """
+    input: CreateRepositoryRulesetInput!
+  ): CreateRepositoryRulesetPayload
+
+  """
+  Create a GitHub Sponsors profile to allow others to sponsor you or your organization.
+  """
+  createSponsorsListing(
+    """
+    Parameters for CreateSponsorsListing
+    """
+    input: CreateSponsorsListingInput!
+  ): CreateSponsorsListingPayload
+
+  """
+  Create a new payment tier for your GitHub Sponsors profile.
+  """
+  createSponsorsTier(
+    """
+    Parameters for CreateSponsorsTier
+    """
+    input: CreateSponsorsTierInput!
+  ): CreateSponsorsTierPayload
+
+  """
+  Start a new sponsorship of a maintainer in GitHub Sponsors, or reactivate a past sponsorship.
+  """
+  createSponsorship(
+    """
+    Parameters for CreateSponsorship
+    """
+    input: CreateSponsorshipInput!
+  ): CreateSponsorshipPayload
+
+  """
+  Make many one-time sponsorships for different sponsorable users or
+  organizations at once. Can only sponsor those who have a public GitHub
+  Sponsors profile.
+  """
+  createSponsorships(
+    """
+    Parameters for CreateSponsorships
+    """
+    input: CreateSponsorshipsInput!
+  ): CreateSponsorshipsPayload
+
+  """
+  Creates a new team discussion.
+  """
+  createTeamDiscussion(
+    """
+    Parameters for CreateTeamDiscussion
+    """
+    input: CreateTeamDiscussionInput!
+  ): CreateTeamDiscussionPayload
+
+  """
+  Creates a new team discussion comment.
+  """
+  createTeamDiscussionComment(
+    """
+    Parameters for CreateTeamDiscussionComment
+    """
+    input: CreateTeamDiscussionCommentInput!
+  ): CreateTeamDiscussionCommentPayload
+
+  """
+  Rejects a suggested topic for the repository.
+  """
+  declineTopicSuggestion(
+    """
+    Parameters for DeclineTopicSuggestion
+    """
+    input: DeclineTopicSuggestionInput!
+  ): DeclineTopicSuggestionPayload
+
+  """
+  Delete a branch protection rule
+  """
+  deleteBranchProtectionRule(
+    """
+    Parameters for DeleteBranchProtectionRule
+    """
+    input: DeleteBranchProtectionRuleInput!
+  ): DeleteBranchProtectionRulePayload
+
+  """
+  Deletes a deployment.
+  """
+  deleteDeployment(
+    """
+    Parameters for DeleteDeployment
+    """
+    input: DeleteDeploymentInput!
+  ): DeleteDeploymentPayload
+
+  """
+  Delete a discussion and all of its replies.
+  """
+  deleteDiscussion(
+    """
+    Parameters for DeleteDiscussion
+    """
+    input: DeleteDiscussionInput!
+  ): DeleteDiscussionPayload
+
+  """
+  Delete a discussion comment. If it has replies, wipe it instead.
+  """
+  deleteDiscussionComment(
+    """
+    Parameters for DeleteDiscussionComment
+    """
+    input: DeleteDiscussionCommentInput!
+  ): DeleteDiscussionCommentPayload
+
+  """
+  Deletes an environment
+  """
+  deleteEnvironment(
+    """
+    Parameters for DeleteEnvironment
+    """
+    input: DeleteEnvironmentInput!
+  ): DeleteEnvironmentPayload
+
+  """
+  Deletes an IP allow list entry.
+  """
+  deleteIpAllowListEntry(
+    """
+    Parameters for DeleteIpAllowListEntry
+    """
+    input: DeleteIpAllowListEntryInput!
+  ): DeleteIpAllowListEntryPayload
+
+  """
+  Deletes an Issue object.
+  """
+  deleteIssue(
+    """
+    Parameters for DeleteIssue
+    """
+    input: DeleteIssueInput!
+  ): DeleteIssuePayload
+
+  """
+  Deletes an IssueComment object.
+  """
+  deleteIssueComment(
+    """
+    Parameters for DeleteIssueComment
+    """
+    input: DeleteIssueCommentInput!
+  ): DeleteIssueCommentPayload
+
+  """
+  Deletes a label.
+  """
+  deleteLabel(
+    """
+    Parameters for DeleteLabel
+    """
+    input: DeleteLabelInput!
+  ): DeleteLabelPayload @preview(toggledBy: "bane-preview")
+
+  """
+  Unlink a branch from an issue.
+  """
+  deleteLinkedBranch(
+    """
+    Parameters for DeleteLinkedBranch
+    """
+    input: DeleteLinkedBranchInput!
+  ): DeleteLinkedBranchPayload
+
+  """
+  Delete a package version.
+  """
+  deletePackageVersion(
+    """
+    Parameters for DeletePackageVersion
+    """
+    input: DeletePackageVersionInput!
+  ): DeletePackageVersionPayload @preview(toggledBy: "package-deletes-preview")
+
+  """
+  Deletes a project.
+  """
+  deleteProject(
+    """
+    Parameters for DeleteProject
+    """
+    input: DeleteProjectInput!
+  ): DeleteProjectPayload
+
+  """
+  Deletes a project card.
+  """
+  deleteProjectCard(
+    """
+    Parameters for DeleteProjectCard
+    """
+    input: DeleteProjectCardInput!
+  ): DeleteProjectCardPayload
+
+  """
+  Deletes a project column.
+  """
+  deleteProjectColumn(
+    """
+    Parameters for DeleteProjectColumn
+    """
+    input: DeleteProjectColumnInput!
+  ): DeleteProjectColumnPayload
+
+  """
+  Delete a project.
+  """
+  deleteProjectV2(
+    """
+    Parameters for DeleteProjectV2
+    """
+    input: DeleteProjectV2Input!
+  ): DeleteProjectV2Payload
+
+  """
+  Delete a project field.
+  """
+  deleteProjectV2Field(
+    """
+    Parameters for DeleteProjectV2Field
+    """
+    input: DeleteProjectV2FieldInput!
+  ): DeleteProjectV2FieldPayload
+
+  """
+  Deletes an item from a Project.
+  """
+  deleteProjectV2Item(
+    """
+    Parameters for DeleteProjectV2Item
+    """
+    input: DeleteProjectV2ItemInput!
+  ): DeleteProjectV2ItemPayload
+
+  """
+  Deletes a project workflow.
+  """
+  deleteProjectV2Workflow(
+    """
+    Parameters for DeleteProjectV2Workflow
+    """
+    input: DeleteProjectV2WorkflowInput!
+  ): DeleteProjectV2WorkflowPayload
+
+  """
+  Deletes a pull request review.
+  """
+  deletePullRequestReview(
+    """
+    Parameters for DeletePullRequestReview
+    """
+    input: DeletePullRequestReviewInput!
+  ): DeletePullRequestReviewPayload
+
+  """
+  Deletes a pull request review comment.
+  """
+  deletePullRequestReviewComment(
+    """
+    Parameters for DeletePullRequestReviewComment
+    """
+    input: DeletePullRequestReviewCommentInput!
+  ): DeletePullRequestReviewCommentPayload
+
+  """
+  Delete a Git Ref.
+  """
+  deleteRef(
+    """
+    Parameters for DeleteRef
+    """
+    input: DeleteRefInput!
+  ): DeleteRefPayload
+
+  """
+  Delete a repository ruleset
+  """
+  deleteRepositoryRuleset(
+    """
+    Parameters for DeleteRepositoryRuleset
+    """
+    input: DeleteRepositoryRulesetInput!
+  ): DeleteRepositoryRulesetPayload
+
+  """
+  Deletes a team discussion.
+  """
+  deleteTeamDiscussion(
+    """
+    Parameters for DeleteTeamDiscussion
+    """
+    input: DeleteTeamDiscussionInput!
+  ): DeleteTeamDiscussionPayload
+
+  """
+  Deletes a team discussion comment.
+  """
+  deleteTeamDiscussionComment(
+    """
+    Parameters for DeleteTeamDiscussionComment
+    """
+    input: DeleteTeamDiscussionCommentInput!
+  ): DeleteTeamDiscussionCommentPayload
+
+  """
+  Deletes a verifiable domain.
+  """
+  deleteVerifiableDomain(
+    """
+    Parameters for DeleteVerifiableDomain
+    """
+    input: DeleteVerifiableDomainInput!
+  ): DeleteVerifiableDomainPayload
+
+  """
+  Remove a pull request from the merge queue.
+  """
+  dequeuePullRequest(
+    """
+    Parameters for DequeuePullRequest
+    """
+    input: DequeuePullRequestInput!
+  ): DequeuePullRequestPayload
+
+  """
+  Disable auto-merge on the given pull request
+  """
+  disablePullRequestAutoMerge(
+    """
+    Parameters for DisablePullRequestAutoMerge
+    """
+    input: DisablePullRequestAutoMergeInput!
+  ): DisablePullRequestAutoMergePayload
+
+  """
+  Dismisses an approved or rejected pull request review.
+  """
+  dismissPullRequestReview(
+    """
+    Parameters for DismissPullRequestReview
+    """
+    input: DismissPullRequestReviewInput!
+  ): DismissPullRequestReviewPayload
+
+  """
+  Dismisses the Dependabot alert.
+  """
+  dismissRepositoryVulnerabilityAlert(
+    """
+    Parameters for DismissRepositoryVulnerabilityAlert
+    """
+    input: DismissRepositoryVulnerabilityAlertInput!
+  ): DismissRepositoryVulnerabilityAlertPayload
+
+  """
+  Enable the default auto-merge on a pull request.
+  """
+  enablePullRequestAutoMerge(
+    """
+    Parameters for EnablePullRequestAutoMerge
+    """
+    input: EnablePullRequestAutoMergeInput!
+  ): EnablePullRequestAutoMergePayload
+
+  """
+  Add a pull request to the merge queue.
+  """
+  enqueuePullRequest(
+    """
+    Parameters for EnqueuePullRequest
+    """
+    input: EnqueuePullRequestInput!
+  ): EnqueuePullRequestPayload
+
+  """
+  Follow an organization.
+  """
+  followOrganization(
+    """
+    Parameters for FollowOrganization
+    """
+    input: FollowOrganizationInput!
+  ): FollowOrganizationPayload
+
+  """
+  Follow a user.
+  """
+  followUser(
+    """
+    Parameters for FollowUser
+    """
+    input: FollowUserInput!
+  ): FollowUserPayload
+
+  """
+  Grant the migrator role to a user for all organizations under an enterprise account.
+  """
+  grantEnterpriseOrganizationsMigratorRole(
+    """
+    Parameters for GrantEnterpriseOrganizationsMigratorRole
+    """
+    input: GrantEnterpriseOrganizationsMigratorRoleInput!
+  ): GrantEnterpriseOrganizationsMigratorRolePayload
+
+  """
+  Grant the migrator role to a user or a team.
+  """
+  grantMigratorRole(
+    """
+    Parameters for GrantMigratorRole
+    """
+    input: GrantMigratorRoleInput!
+  ): GrantMigratorRolePayload
+
+  """
+  Creates a new project by importing columns and a list of issues/PRs.
+  """
+  importProject(
+    """
+    Parameters for ImportProject
+    """
+    input: ImportProjectInput!
+  ): ImportProjectPayload @preview(toggledBy: "slothette-preview")
+
+  """
+  Invite someone to become an administrator of the enterprise.
+  """
+  inviteEnterpriseAdmin(
+    """
+    Parameters for InviteEnterpriseAdmin
+    """
+    input: InviteEnterpriseAdminInput!
+  ): InviteEnterpriseAdminPayload
+
+  """
+  Links a project to a repository.
+  """
+  linkProjectV2ToRepository(
+    """
+    Parameters for LinkProjectV2ToRepository
+    """
+    input: LinkProjectV2ToRepositoryInput!
+  ): LinkProjectV2ToRepositoryPayload
+
+  """
+  Links a project to a team.
+  """
+  linkProjectV2ToTeam(
+    """
+    Parameters for LinkProjectV2ToTeam
+    """
+    input: LinkProjectV2ToTeamInput!
+  ): LinkProjectV2ToTeamPayload
+
+  """
+  Creates a repository link for a project.
+  """
+  linkRepositoryToProject(
+    """
+    Parameters for LinkRepositoryToProject
+    """
+    input: LinkRepositoryToProjectInput!
+  ): LinkRepositoryToProjectPayload
+
+  """
+  Lock a lockable object
+  """
+  lockLockable(
+    """
+    Parameters for LockLockable
+    """
+    input: LockLockableInput!
+  ): LockLockablePayload
+
+  """
+  Mark a discussion comment as the chosen answer for discussions in an answerable category.
+  """
+  markDiscussionCommentAsAnswer(
+    """
+    Parameters for MarkDiscussionCommentAsAnswer
+    """
+    input: MarkDiscussionCommentAsAnswerInput!
+  ): MarkDiscussionCommentAsAnswerPayload
+
+  """
+  Mark a pull request file as viewed
+  """
+  markFileAsViewed(
+    """
+    Parameters for MarkFileAsViewed
+    """
+    input: MarkFileAsViewedInput!
+  ): MarkFileAsViewedPayload
+
+  """
+  Mark a project as a template. Note that only projects which are owned by an Organization can be marked as a template.
+  """
+  markProjectV2AsTemplate(
+    """
+    Parameters for MarkProjectV2AsTemplate
+    """
+    input: MarkProjectV2AsTemplateInput!
+  ): MarkProjectV2AsTemplatePayload
+
+  """
+  Marks a pull request ready for review.
+  """
+  markPullRequestReadyForReview(
+    """
+    Parameters for MarkPullRequestReadyForReview
+    """
+    input: MarkPullRequestReadyForReviewInput!
+  ): MarkPullRequestReadyForReviewPayload
+
+  """
+  Merge a head into a branch.
+  """
+  mergeBranch(
+    """
+    Parameters for MergeBranch
+    """
+    input: MergeBranchInput!
+  ): MergeBranchPayload
+
+  """
+  Merge a pull request.
+  """
+  mergePullRequest(
+    """
+    Parameters for MergePullRequest
+    """
+    input: MergePullRequestInput!
+  ): MergePullRequestPayload
+
+  """
+  Minimizes a comment on an Issue, Commit, Pull Request, or Gist
+  """
+  minimizeComment(
+    """
+    Parameters for MinimizeComment
+    """
+    input: MinimizeCommentInput!
+  ): MinimizeCommentPayload
+
+  """
+  Moves a project card to another place.
+  """
+  moveProjectCard(
+    """
+    Parameters for MoveProjectCard
+    """
+    input: MoveProjectCardInput!
+  ): MoveProjectCardPayload
+
+  """
+  Moves a project column to another place.
+  """
+  moveProjectColumn(
+    """
+    Parameters for MoveProjectColumn
+    """
+    input: MoveProjectColumnInput!
+  ): MoveProjectColumnPayload
+
+  """
+  Pin an issue to a repository
+  """
+  pinIssue(
+    """
+    Parameters for PinIssue
+    """
+    input: PinIssueInput!
+  ): PinIssuePayload
+
+  """
+  Publish an existing sponsorship tier that is currently still a draft to a GitHub Sponsors profile.
+  """
+  publishSponsorsTier(
+    """
+    Parameters for PublishSponsorsTier
+    """
+    input: PublishSponsorsTierInput!
+  ): PublishSponsorsTierPayload
+
+  """
+  Regenerates the identity provider recovery codes for an enterprise
+  """
+  regenerateEnterpriseIdentityProviderRecoveryCodes(
+    """
+    Parameters for RegenerateEnterpriseIdentityProviderRecoveryCodes
+    """
+    input: RegenerateEnterpriseIdentityProviderRecoveryCodesInput!
+  ): RegenerateEnterpriseIdentityProviderRecoveryCodesPayload
+
+  """
+  Regenerates a verifiable domain's verification token.
+  """
+  regenerateVerifiableDomainToken(
+    """
+    Parameters for RegenerateVerifiableDomainToken
+    """
+    input: RegenerateVerifiableDomainTokenInput!
+  ): RegenerateVerifiableDomainTokenPayload
+
+  """
+  Reject all pending deployments under one or more environments
+  """
+  rejectDeployments(
+    """
+    Parameters for RejectDeployments
+    """
+    input: RejectDeploymentsInput!
+  ): RejectDeploymentsPayload
+
+  """
+  Removes assignees from an assignable object.
+  """
+  removeAssigneesFromAssignable(
+    """
+    Parameters for RemoveAssigneesFromAssignable
+    """
+    input: RemoveAssigneesFromAssignableInput!
+  ): RemoveAssigneesFromAssignablePayload
+
+  """
+  Removes an administrator from the enterprise.
+  """
+  removeEnterpriseAdmin(
+    """
+    Parameters for RemoveEnterpriseAdmin
+    """
+    input: RemoveEnterpriseAdminInput!
+  ): RemoveEnterpriseAdminPayload
+
+  """
+  Removes the identity provider from an enterprise
+  """
+  removeEnterpriseIdentityProvider(
+    """
+    Parameters for RemoveEnterpriseIdentityProvider
+    """
+    input: RemoveEnterpriseIdentityProviderInput!
+  ): RemoveEnterpriseIdentityProviderPayload
+
+  """
+  Removes a user from all organizations within the enterprise
+  """
+  removeEnterpriseMember(
+    """
+    Parameters for RemoveEnterpriseMember
+    """
+    input: RemoveEnterpriseMemberInput!
+  ): RemoveEnterpriseMemberPayload
+
+  """
+  Removes an organization from the enterprise
+  """
+  removeEnterpriseOrganization(
+    """
+    Parameters for RemoveEnterpriseOrganization
+    """
+    input: RemoveEnterpriseOrganizationInput!
+  ): RemoveEnterpriseOrganizationPayload
+
+  """
+  Removes a support entitlement from an enterprise member.
+  """
+  removeEnterpriseSupportEntitlement(
+    """
+    Parameters for RemoveEnterpriseSupportEntitlement
+    """
+    input: RemoveEnterpriseSupportEntitlementInput!
+  ): RemoveEnterpriseSupportEntitlementPayload
+
+  """
+  Removes labels from a Labelable object.
+  """
+  removeLabelsFromLabelable(
+    """
+    Parameters for RemoveLabelsFromLabelable
+    """
+    input: RemoveLabelsFromLabelableInput!
+  ): RemoveLabelsFromLabelablePayload
+
+  """
+  Removes an outside collaborator from all repositories in an organization.
+  """
+  removeOutsideCollaborator(
+    """
+    Parameters for RemoveOutsideCollaborator
+    """
+    input: RemoveOutsideCollaboratorInput!
+  ): RemoveOutsideCollaboratorPayload
+
+  """
+  Removes a reaction from a subject.
+  """
+  removeReaction(
+    """
+    Parameters for RemoveReaction
+    """
+    input: RemoveReactionInput!
+  ): RemoveReactionPayload
+
+  """
+  Removes a star from a Starrable.
+  """
+  removeStar(
+    """
+    Parameters for RemoveStar
+    """
+    input: RemoveStarInput!
+  ): RemoveStarPayload
+
+  """
+  Remove an upvote from a discussion or discussion comment.
+  """
+  removeUpvote(
+    """
+    Parameters for RemoveUpvote
+    """
+    input: RemoveUpvoteInput!
+  ): RemoveUpvotePayload
+
+  """
+  Reopen a discussion.
+  """
+  reopenDiscussion(
+    """
+    Parameters for ReopenDiscussion
+    """
+    input: ReopenDiscussionInput!
+  ): ReopenDiscussionPayload
+
+  """
+  Reopen an issue.
+  """
+  reopenIssue(
+    """
+    Parameters for ReopenIssue
+    """
+    input: ReopenIssueInput!
+  ): ReopenIssuePayload
+
+  """
+  Reopen a pull request.
+  """
+  reopenPullRequest(
+    """
+    Parameters for ReopenPullRequest
+    """
+    input: ReopenPullRequestInput!
+  ): ReopenPullRequestPayload
+
+  """
+  Set review requests on a pull request.
+  """
+  requestReviews(
+    """
+    Parameters for RequestReviews
+    """
+    input: RequestReviewsInput!
+  ): RequestReviewsPayload
+
+  """
+  Rerequests an existing check suite.
+  """
+  rerequestCheckSuite(
+    """
+    Parameters for RerequestCheckSuite
+    """
+    input: RerequestCheckSuiteInput!
+  ): RerequestCheckSuitePayload
+
+  """
+  Marks a review thread as resolved.
+  """
+  resolveReviewThread(
+    """
+    Parameters for ResolveReviewThread
+    """
+    input: ResolveReviewThreadInput!
+  ): ResolveReviewThreadPayload
+
+  """
+  Retire a published payment tier from your GitHub Sponsors profile so it cannot be used to start new sponsorships.
+  """
+  retireSponsorsTier(
+    """
+    Parameters for RetireSponsorsTier
+    """
+    input: RetireSponsorsTierInput!
+  ): RetireSponsorsTierPayload
+
+  """
+  Create a pull request that reverts the changes from a merged pull request.
+  """
+  revertPullRequest(
+    """
+    Parameters for RevertPullRequest
+    """
+    input: RevertPullRequestInput!
+  ): RevertPullRequestPayload
+
+  """
+  Revoke the migrator role from a user for all organizations under an enterprise account.
+  """
+  revokeEnterpriseOrganizationsMigratorRole(
+    """
+    Parameters for RevokeEnterpriseOrganizationsMigratorRole
+    """
+    input: RevokeEnterpriseOrganizationsMigratorRoleInput!
+  ): RevokeEnterpriseOrganizationsMigratorRolePayload
+
+  """
+  Revoke the migrator role from a user or a team.
+  """
+  revokeMigratorRole(
+    """
+    Parameters for RevokeMigratorRole
+    """
+    input: RevokeMigratorRoleInput!
+  ): RevokeMigratorRolePayload
+
+  """
+  Creates or updates the identity provider for an enterprise.
+  """
+  setEnterpriseIdentityProvider(
+    """
+    Parameters for SetEnterpriseIdentityProvider
+    """
+    input: SetEnterpriseIdentityProviderInput!
+  ): SetEnterpriseIdentityProviderPayload
+
+  """
+  Set an organization level interaction limit for an organization's public repositories.
+  """
+  setOrganizationInteractionLimit(
+    """
+    Parameters for SetOrganizationInteractionLimit
+    """
+    input: SetOrganizationInteractionLimitInput!
+  ): SetOrganizationInteractionLimitPayload
+
+  """
+  Sets an interaction limit setting for a repository.
+  """
+  setRepositoryInteractionLimit(
+    """
+    Parameters for SetRepositoryInteractionLimit
+    """
+    input: SetRepositoryInteractionLimitInput!
+  ): SetRepositoryInteractionLimitPayload
+
+  """
+  Set a user level interaction limit for a user's public repositories.
+  """
+  setUserInteractionLimit(
+    """
+    Parameters for SetUserInteractionLimit
+    """
+    input: SetUserInteractionLimitInput!
+  ): SetUserInteractionLimitPayload
+
+  """
+  Starts a GitHub Enterprise Importer organization migration.
+  """
+  startOrganizationMigration(
+    """
+    Parameters for StartOrganizationMigration
+    """
+    input: StartOrganizationMigrationInput!
+  ): StartOrganizationMigrationPayload
+
+  """
+  Starts a GitHub Enterprise Importer (GEI) repository migration.
+  """
+  startRepositoryMigration(
+    """
+    Parameters for StartRepositoryMigration
+    """
+    input: StartRepositoryMigrationInput!
+  ): StartRepositoryMigrationPayload
+
+  """
+  Submits a pending pull request review.
+  """
+  submitPullRequestReview(
+    """
+    Parameters for SubmitPullRequestReview
+    """
+    input: SubmitPullRequestReviewInput!
+  ): SubmitPullRequestReviewPayload
+
+  """
+  Transfer an organization from one enterprise to another enterprise.
+  """
+  transferEnterpriseOrganization(
+    """
+    Parameters for TransferEnterpriseOrganization
+    """
+    input: TransferEnterpriseOrganizationInput!
+  ): TransferEnterpriseOrganizationPayload
+
+  """
+  Transfer an issue to a different repository
+  """
+  transferIssue(
+    """
+    Parameters for TransferIssue
+    """
+    input: TransferIssueInput!
+  ): TransferIssuePayload
+
+  """
+  Unarchives a ProjectV2Item
+  """
+  unarchiveProjectV2Item(
+    """
+    Parameters for UnarchiveProjectV2Item
+    """
+    input: UnarchiveProjectV2ItemInput!
+  ): UnarchiveProjectV2ItemPayload
+
+  """
+  Unarchives a repository.
+  """
+  unarchiveRepository(
+    """
+    Parameters for UnarchiveRepository
+    """
+    input: UnarchiveRepositoryInput!
+  ): UnarchiveRepositoryPayload
+
+  """
+  Unfollow an organization.
+  """
+  unfollowOrganization(
+    """
+    Parameters for UnfollowOrganization
+    """
+    input: UnfollowOrganizationInput!
+  ): UnfollowOrganizationPayload
+
+  """
+  Unfollow a user.
+  """
+  unfollowUser(
+    """
+    Parameters for UnfollowUser
+    """
+    input: UnfollowUserInput!
+  ): UnfollowUserPayload
+
+  """
+  Unlinks a project from a repository.
+  """
+  unlinkProjectV2FromRepository(
+    """
+    Parameters for UnlinkProjectV2FromRepository
+    """
+    input: UnlinkProjectV2FromRepositoryInput!
+  ): UnlinkProjectV2FromRepositoryPayload
+
+  """
+  Unlinks a project from a team.
+  """
+  unlinkProjectV2FromTeam(
+    """
+    Parameters for UnlinkProjectV2FromTeam
+    """
+    input: UnlinkProjectV2FromTeamInput!
+  ): UnlinkProjectV2FromTeamPayload
+
+  """
+  Deletes a repository link from a project.
+  """
+  unlinkRepositoryFromProject(
+    """
+    Parameters for UnlinkRepositoryFromProject
+    """
+    input: UnlinkRepositoryFromProjectInput!
+  ): UnlinkRepositoryFromProjectPayload
+
+  """
+  Unlock a lockable object
+  """
+  unlockLockable(
+    """
+    Parameters for UnlockLockable
+    """
+    input: UnlockLockableInput!
+  ): UnlockLockablePayload
+
+  """
+  Unmark a discussion comment as the chosen answer for discussions in an answerable category.
+  """
+  unmarkDiscussionCommentAsAnswer(
+    """
+    Parameters for UnmarkDiscussionCommentAsAnswer
+    """
+    input: UnmarkDiscussionCommentAsAnswerInput!
+  ): UnmarkDiscussionCommentAsAnswerPayload
+
+  """
+  Unmark a pull request file as viewed
+  """
+  unmarkFileAsViewed(
+    """
+    Parameters for UnmarkFileAsViewed
+    """
+    input: UnmarkFileAsViewedInput!
+  ): UnmarkFileAsViewedPayload
+
+  """
+  Unmark an issue as a duplicate of another issue.
+  """
+  unmarkIssueAsDuplicate(
+    """
+    Parameters for UnmarkIssueAsDuplicate
+    """
+    input: UnmarkIssueAsDuplicateInput!
+  ): UnmarkIssueAsDuplicatePayload
+
+  """
+  Unmark a project as a template.
+  """
+  unmarkProjectV2AsTemplate(
+    """
+    Parameters for UnmarkProjectV2AsTemplate
+    """
+    input: UnmarkProjectV2AsTemplateInput!
+  ): UnmarkProjectV2AsTemplatePayload
+
+  """
+  Unminimizes a comment on an Issue, Commit, Pull Request, or Gist
+  """
+  unminimizeComment(
+    """
+    Parameters for UnminimizeComment
+    """
+    input: UnminimizeCommentInput!
+  ): UnminimizeCommentPayload
+
+  """
+  Unpin a pinned issue from a repository
+  """
+  unpinIssue(
+    """
+    Parameters for UnpinIssue
+    """
+    input: UnpinIssueInput!
+  ): UnpinIssuePayload
+
+  """
+  Marks a review thread as unresolved.
+  """
+  unresolveReviewThread(
+    """
+    Parameters for UnresolveReviewThread
+    """
+    input: UnresolveReviewThreadInput!
+  ): UnresolveReviewThreadPayload
+
+  """
+  Update a branch protection rule
+  """
+  updateBranchProtectionRule(
+    """
+    Parameters for UpdateBranchProtectionRule
+    """
+    input: UpdateBranchProtectionRuleInput!
+  ): UpdateBranchProtectionRulePayload
+
+  """
+  Update a check run
+  """
+  updateCheckRun(
+    """
+    Parameters for UpdateCheckRun
+    """
+    input: UpdateCheckRunInput!
+  ): UpdateCheckRunPayload
+
+  """
+  Modifies the settings of an existing check suite
+  """
+  updateCheckSuitePreferences(
+    """
+    Parameters for UpdateCheckSuitePreferences
+    """
+    input: UpdateCheckSuitePreferencesInput!
+  ): UpdateCheckSuitePreferencesPayload
+
+  """
+  Update a discussion
+  """
+  updateDiscussion(
+    """
+    Parameters for UpdateDiscussion
+    """
+    input: UpdateDiscussionInput!
+  ): UpdateDiscussionPayload
+
+  """
+  Update the contents of a comment on a Discussion
+  """
+  updateDiscussionComment(
+    """
+    Parameters for UpdateDiscussionComment
+    """
+    input: UpdateDiscussionCommentInput!
+  ): UpdateDiscussionCommentPayload
+
+  """
+  Updates the role of an enterprise administrator.
+  """
+  updateEnterpriseAdministratorRole(
+    """
+    Parameters for UpdateEnterpriseAdministratorRole
+    """
+    input: UpdateEnterpriseAdministratorRoleInput!
+  ): UpdateEnterpriseAdministratorRolePayload
+
+  """
+  Sets whether private repository forks are enabled for an enterprise.
+  """
+  updateEnterpriseAllowPrivateRepositoryForkingSetting(
+    """
+    Parameters for UpdateEnterpriseAllowPrivateRepositoryForkingSetting
+    """
+    input: UpdateEnterpriseAllowPrivateRepositoryForkingSettingInput!
+  ): UpdateEnterpriseAllowPrivateRepositoryForkingSettingPayload
+
+  """
+  Sets the base repository permission for organizations in an enterprise.
+  """
+  updateEnterpriseDefaultRepositoryPermissionSetting(
+    """
+    Parameters for UpdateEnterpriseDefaultRepositoryPermissionSetting
+    """
+    input: UpdateEnterpriseDefaultRepositoryPermissionSettingInput!
+  ): UpdateEnterpriseDefaultRepositoryPermissionSettingPayload
+
+  """
+  Sets whether organization members with admin permissions on a repository can change repository visibility.
+  """
+  updateEnterpriseMembersCanChangeRepositoryVisibilitySetting(
+    """
+    Parameters for UpdateEnterpriseMembersCanChangeRepositoryVisibilitySetting
+    """
+    input: UpdateEnterpriseMembersCanChangeRepositoryVisibilitySettingInput!
+  ): UpdateEnterpriseMembersCanChangeRepositoryVisibilitySettingPayload
+
+  """
+  Sets the members can create repositories setting for an enterprise.
+  """
+  updateEnterpriseMembersCanCreateRepositoriesSetting(
+    """
+    Parameters for UpdateEnterpriseMembersCanCreateRepositoriesSetting
+    """
+    input: UpdateEnterpriseMembersCanCreateRepositoriesSettingInput!
+  ): UpdateEnterpriseMembersCanCreateRepositoriesSettingPayload
+
+  """
+  Sets the members can delete issues setting for an enterprise.
+  """
+  updateEnterpriseMembersCanDeleteIssuesSetting(
+    """
+    Parameters for UpdateEnterpriseMembersCanDeleteIssuesSetting
+    """
+    input: UpdateEnterpriseMembersCanDeleteIssuesSettingInput!
+  ): UpdateEnterpriseMembersCanDeleteIssuesSettingPayload
+
+  """
+  Sets the members can delete repositories setting for an enterprise.
+  """
+  updateEnterpriseMembersCanDeleteRepositoriesSetting(
+    """
+    Parameters for UpdateEnterpriseMembersCanDeleteRepositoriesSetting
+    """
+    input: UpdateEnterpriseMembersCanDeleteRepositoriesSettingInput!
+  ): UpdateEnterpriseMembersCanDeleteRepositoriesSettingPayload
+
+  """
+  Sets whether members can invite collaborators for an enterprise.
+  """
+  updateEnterpriseMembersCanInviteCollaboratorsSetting(
+    """
+    Parameters for UpdateEnterpriseMembersCanInviteCollaboratorsSetting
+    """
+    input: UpdateEnterpriseMembersCanInviteCollaboratorsSettingInput!
+  ): UpdateEnterpriseMembersCanInviteCollaboratorsSettingPayload
+
+  """
+  Sets whether or not an organization owner can make purchases.
+  """
+  updateEnterpriseMembersCanMakePurchasesSetting(
+    """
+    Parameters for UpdateEnterpriseMembersCanMakePurchasesSetting
+    """
+    input: UpdateEnterpriseMembersCanMakePurchasesSettingInput!
+  ): UpdateEnterpriseMembersCanMakePurchasesSettingPayload
+
+  """
+  Sets the members can update protected branches setting for an enterprise.
+  """
+  updateEnterpriseMembersCanUpdateProtectedBranchesSetting(
+    """
+    Parameters for UpdateEnterpriseMembersCanUpdateProtectedBranchesSetting
+    """
+    input: UpdateEnterpriseMembersCanUpdateProtectedBranchesSettingInput!
+  ): UpdateEnterpriseMembersCanUpdateProtectedBranchesSettingPayload
+
+  """
+  Sets the members can view dependency insights setting for an enterprise.
+  """
+  updateEnterpriseMembersCanViewDependencyInsightsSetting(
+    """
+    Parameters for UpdateEnterpriseMembersCanViewDependencyInsightsSetting
+    """
+    input: UpdateEnterpriseMembersCanViewDependencyInsightsSettingInput!
+  ): UpdateEnterpriseMembersCanViewDependencyInsightsSettingPayload
+
+  """
+  Sets whether organization projects are enabled for an enterprise.
+  """
+  updateEnterpriseOrganizationProjectsSetting(
+    """
+    Parameters for UpdateEnterpriseOrganizationProjectsSetting
+    """
+    input: UpdateEnterpriseOrganizationProjectsSettingInput!
+  ): UpdateEnterpriseOrganizationProjectsSettingPayload
+
+  """
+  Updates the role of an enterprise owner with an organization.
+  """
+  updateEnterpriseOwnerOrganizationRole(
+    """
+    Parameters for UpdateEnterpriseOwnerOrganizationRole
+    """
+    input: UpdateEnterpriseOwnerOrganizationRoleInput!
+  ): UpdateEnterpriseOwnerOrganizationRolePayload
+
+  """
+  Updates an enterprise's profile.
+  """
+  updateEnterpriseProfile(
+    """
+    Parameters for UpdateEnterpriseProfile
+    """
+    input: UpdateEnterpriseProfileInput!
+  ): UpdateEnterpriseProfilePayload
+
+  """
+  Sets whether repository projects are enabled for an enterprise.
+  """
+  updateEnterpriseRepositoryProjectsSetting(
+    """
+    Parameters for UpdateEnterpriseRepositoryProjectsSetting
+    """
+    input: UpdateEnterpriseRepositoryProjectsSettingInput!
+  ): UpdateEnterpriseRepositoryProjectsSettingPayload
+
+  """
+  Sets whether team discussions are enabled for an enterprise.
+  """
+  updateEnterpriseTeamDiscussionsSetting(
+    """
+    Parameters for UpdateEnterpriseTeamDiscussionsSetting
+    """
+    input: UpdateEnterpriseTeamDiscussionsSettingInput!
+  ): UpdateEnterpriseTeamDiscussionsSettingPayload
+
+  """
+  Sets whether two-factor authentication is required for all users in an enterprise.
+  """
+  updateEnterpriseTwoFactorAuthenticationRequiredSetting(
+    """
+    Parameters for UpdateEnterpriseTwoFactorAuthenticationRequiredSetting
+    """
+    input: UpdateEnterpriseTwoFactorAuthenticationRequiredSettingInput!
+  ): UpdateEnterpriseTwoFactorAuthenticationRequiredSettingPayload
+
+  """
+  Updates an environment.
+  """
+  updateEnvironment(
+    """
+    Parameters for UpdateEnvironment
+    """
+    input: UpdateEnvironmentInput!
+  ): UpdateEnvironmentPayload
+
+  """
+  Sets whether an IP allow list is enabled on an owner.
+  """
+  updateIpAllowListEnabledSetting(
+    """
+    Parameters for UpdateIpAllowListEnabledSetting
+    """
+    input: UpdateIpAllowListEnabledSettingInput!
+  ): UpdateIpAllowListEnabledSettingPayload
+
+  """
+  Updates an IP allow list entry.
+  """
+  updateIpAllowListEntry(
+    """
+    Parameters for UpdateIpAllowListEntry
+    """
+    input: UpdateIpAllowListEntryInput!
+  ): UpdateIpAllowListEntryPayload
+
+  """
+  Sets whether IP allow list configuration for installed GitHub Apps is enabled on an owner.
+  """
+  updateIpAllowListForInstalledAppsEnabledSetting(
+    """
+    Parameters for UpdateIpAllowListForInstalledAppsEnabledSetting
+    """
+    input: UpdateIpAllowListForInstalledAppsEnabledSettingInput!
+  ): UpdateIpAllowListForInstalledAppsEnabledSettingPayload
+
+  """
+  Updates an Issue.
+  """
+  updateIssue(
+    """
+    Parameters for UpdateIssue
+    """
+    input: UpdateIssueInput!
+  ): UpdateIssuePayload
+
+  """
+  Updates an IssueComment object.
+  """
+  updateIssueComment(
+    """
+    Parameters for UpdateIssueComment
+    """
+    input: UpdateIssueCommentInput!
+  ): UpdateIssueCommentPayload
+
+  """
+  Updates an existing label.
+  """
+  updateLabel(
+    """
+    Parameters for UpdateLabel
+    """
+    input: UpdateLabelInput!
+  ): UpdateLabelPayload @preview(toggledBy: "bane-preview")
+
+  """
+  Update the setting to restrict notifications to only verified or approved domains available to an owner.
+  """
+  updateNotificationRestrictionSetting(
+    """
+    Parameters for UpdateNotificationRestrictionSetting
+    """
+    input: UpdateNotificationRestrictionSettingInput!
+  ): UpdateNotificationRestrictionSettingPayload
+
+  """
+  Sets whether private repository forks are enabled for an organization.
+  """
+  updateOrganizationAllowPrivateRepositoryForkingSetting(
+    """
+    Parameters for UpdateOrganizationAllowPrivateRepositoryForkingSetting
+    """
+    input: UpdateOrganizationAllowPrivateRepositoryForkingSettingInput!
+  ): UpdateOrganizationAllowPrivateRepositoryForkingSettingPayload
+
+  """
+  Sets whether contributors are required to sign off on web-based commits for repositories in an organization.
+  """
+  updateOrganizationWebCommitSignoffSetting(
+    """
+    Parameters for UpdateOrganizationWebCommitSignoffSetting
+    """
+    input: UpdateOrganizationWebCommitSignoffSettingInput!
+  ): UpdateOrganizationWebCommitSignoffSettingPayload
+
+  """
+  Toggle the setting for your GitHub Sponsors profile that allows other GitHub
+  accounts to sponsor you on GitHub while paying for the sponsorship on Patreon.
+  Only applicable when you have a GitHub Sponsors profile and have connected
+  your GitHub account with Patreon.
+  """
+  updatePatreonSponsorability(
+    """
+    Parameters for UpdatePatreonSponsorability
+    """
+    input: UpdatePatreonSponsorabilityInput!
+  ): UpdatePatreonSponsorabilityPayload
+
+  """
+  Updates an existing project.
+  """
+  updateProject(
+    """
+    Parameters for UpdateProject
+    """
+    input: UpdateProjectInput!
+  ): UpdateProjectPayload
+
+  """
+  Updates an existing project card.
+  """
+  updateProjectCard(
+    """
+    Parameters for UpdateProjectCard
+    """
+    input: UpdateProjectCardInput!
+  ): UpdateProjectCardPayload
+
+  """
+  Updates an existing project column.
+  """
+  updateProjectColumn(
+    """
+    Parameters for UpdateProjectColumn
+    """
+    input: UpdateProjectColumnInput!
+  ): UpdateProjectColumnPayload
+
+  """
+  Updates an existing project (beta).
+  """
+  updateProjectV2(
+    """
+    Parameters for UpdateProjectV2
+    """
+    input: UpdateProjectV2Input!
+  ): UpdateProjectV2Payload
+
+  """
+  Update the collaborators on a team or a project
+  """
+  updateProjectV2Collaborators(
+    """
+    Parameters for UpdateProjectV2Collaborators
+    """
+    input: UpdateProjectV2CollaboratorsInput!
+  ): UpdateProjectV2CollaboratorsPayload
+
+  """
+  Updates a draft issue within a Project.
+  """
+  updateProjectV2DraftIssue(
+    """
+    Parameters for UpdateProjectV2DraftIssue
+    """
+    input: UpdateProjectV2DraftIssueInput!
+  ): UpdateProjectV2DraftIssuePayload
+
+  """
+  This mutation updates the value of a field for an item in a Project. Currently
+  only single-select, text, number, date, and iteration fields are supported.
+  """
+  updateProjectV2ItemFieldValue(
+    """
+    Parameters for UpdateProjectV2ItemFieldValue
+    """
+    input: UpdateProjectV2ItemFieldValueInput!
+  ): UpdateProjectV2ItemFieldValuePayload
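+
+  # Comment-only sketch (not part of the schema): setting a text field on
+  # a project item. The input field names (`projectId`, `itemId`,
+  # `fieldId`, `value`) and the `projectV2Item` selection are assumptions
+  # from the public API; the node IDs are hypothetical placeholders.
+  #
+  # mutation {
+  #   updateProjectV2ItemFieldValue(input: {
+  #     projectId: "PVT_xxx"
+  #     itemId: "PVTI_xxx"
+  #     fieldId: "PVTF_xxx"
+  #     value: {text: "In review"}
+  #   }) {
+  #     projectV2Item { id }
+  #   }
+  # }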
+
+  """
+  This mutation updates the position of the item in the project, where the position represents the priority of an item.
+  """
+  updateProjectV2ItemPosition(
+    """
+    Parameters for UpdateProjectV2ItemPosition
+    """
+    input: UpdateProjectV2ItemPositionInput!
+  ): UpdateProjectV2ItemPositionPayload
+
+  """
+  Update a pull request
+  """
+  updatePullRequest(
+    """
+    Parameters for UpdatePullRequest
+    """
+    input: UpdatePullRequestInput!
+  ): UpdatePullRequestPayload
+
+  """
+  Merge or rebase the HEAD of the upstream branch into the pull request branch
+  """
+  updatePullRequestBranch(
+    """
+    Parameters for UpdatePullRequestBranch
+    """
+    input: UpdatePullRequestBranchInput!
+  ): UpdatePullRequestBranchPayload
+
+  """
+  Updates the body of a pull request review.
+  """
+  updatePullRequestReview(
+    """
+    Parameters for UpdatePullRequestReview
+    """
+    input: UpdatePullRequestReviewInput!
+  ): UpdatePullRequestReviewPayload
+
+  """
+  Updates a pull request review comment.
+  """
+  updatePullRequestReviewComment(
+    """
+    Parameters for UpdatePullRequestReviewComment
+    """
+    input: UpdatePullRequestReviewCommentInput!
+  ): UpdatePullRequestReviewCommentPayload
+
+  """
+  Update a Git Ref.
+  """
+  updateRef(
+    """
+    Parameters for UpdateRef
+    """
+    input: UpdateRefInput!
+  ): UpdateRefPayload
+
+  """
+  Creates, updates and/or deletes multiple refs in a repository.
+
+  This mutation takes a list of `RefUpdate`s and performs these updates
+  on the repository. All updates are performed atomically, meaning that
+  if one of them is rejected, no other ref will be modified.
+
+  `RefUpdate.beforeOid` specifies that the given reference needs to point
+  to the given value before performing any updates. A value of
+  `0000000000000000000000000000000000000000` can be used to verify that
+  the reference does not exist.
+
+  `RefUpdate.afterOid` specifies the value that the given reference
+  will point to after performing all updates. A value of
+  `0000000000000000000000000000000000000000` can be used to delete a
+  reference.
+
+  If `RefUpdate.force` is set to `true`, non-fast-forward updates
+  for the given reference will be allowed.
+  """
+  updateRefs(
+    """
+    Parameters for UpdateRefs
+    """
+    input: UpdateRefsInput!
+  ): UpdateRefsPayload @preview(toggledBy: "update-refs-preview")
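+
+  # Comment-only sketch (not part of the schema): one atomic updateRefs
+  # call that updates `refs/heads/main` and deletes `refs/heads/old` via
+  # the all-zero OID described above. The `refUpdates`/`name` input names
+  # and the `clientMutationId` selection are assumptions from the public
+  # API.
+  #
+  # mutation {
+  #   updateRefs(input: {refUpdates: [
+  #     {name: "refs/heads/main", afterOid: "<new oid>"}
+  #     {name: "refs/heads/old", afterOid: "0000000000000000000000000000000000000000"}
+  #   ]}) {
+  #     clientMutationId
+  #   }
+  # }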
+
+  """
+  Update information about a repository.
+  """
+  updateRepository(
+    """
+    Parameters for UpdateRepository
+    """
+    input: UpdateRepositoryInput!
+  ): UpdateRepositoryPayload
+
+  """
+  Update a repository ruleset
+  """
+  updateRepositoryRuleset(
+    """
+    Parameters for UpdateRepositoryRuleset
+    """
+    input: UpdateRepositoryRulesetInput!
+  ): UpdateRepositoryRulesetPayload
+
+  """
+  Sets whether contributors are required to sign off on web-based commits for a repository.
+  """
+  updateRepositoryWebCommitSignoffSetting(
+    """
+    Parameters for UpdateRepositoryWebCommitSignoffSetting
+    """
+    input: UpdateRepositoryWebCommitSignoffSettingInput!
+  ): UpdateRepositoryWebCommitSignoffSettingPayload
+
+  """
+  Change visibility of your sponsorship and opt in or out of email updates from the maintainer.
+  """
+  updateSponsorshipPreferences(
+    """
+    Parameters for UpdateSponsorshipPreferences
+    """
+    input: UpdateSponsorshipPreferencesInput!
+  ): UpdateSponsorshipPreferencesPayload
+
+  """
+  Updates the state for subscribable subjects.
+  """
+  updateSubscription(
+    """
+    Parameters for UpdateSubscription
+    """
+    input: UpdateSubscriptionInput!
+  ): UpdateSubscriptionPayload
+
+  """
+  Updates a team discussion.
+  """
+  updateTeamDiscussion(
+    """
+    Parameters for UpdateTeamDiscussion
+    """
+    input: UpdateTeamDiscussionInput!
+  ): UpdateTeamDiscussionPayload
+
+  """
+  Updates a team discussion comment.
+  """
+  updateTeamDiscussionComment(
+    """
+    Parameters for UpdateTeamDiscussionComment
+    """
+    input: UpdateTeamDiscussionCommentInput!
+  ): UpdateTeamDiscussionCommentPayload
+
+  """
+  Updates team review assignment.
+  """
+  updateTeamReviewAssignment(
+    """
+    Parameters for UpdateTeamReviewAssignment
+    """
+    input: UpdateTeamReviewAssignmentInput!
+  ): UpdateTeamReviewAssignmentPayload @preview(toggledBy: "stone-crop-preview")
+
+  """
+  Update a team repository.
+  """
+  updateTeamsRepository(
+    """
+    Parameters for UpdateTeamsRepository
+    """
+    input: UpdateTeamsRepositoryInput!
+  ): UpdateTeamsRepositoryPayload
+
+  """
+  Replaces the repository's topics with the given topics.
+  """
+  updateTopics(
+    """
+    Parameters for UpdateTopics
+    """
+    input: UpdateTopicsInput!
+  ): UpdateTopicsPayload
+
+  """
+  Verify that a verifiable domain has the expected DNS record.
+  """
+  verifyVerifiableDomain(
+    """
+    Parameters for VerifyVerifiableDomain
+    """
+    input: VerifyVerifiableDomainInput!
+  ): VerifyVerifiableDomainPayload
+}
+
+"""
+An object with an ID.
+"""
+interface Node {
+  """
+  ID of the object.
+  """
+  id: ID!
+}
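+
+# Comment-only sketch (not part of the schema): any type implementing
+# Node can be refetched by its global ID. The top-level `node` field is
+# assumed from the usual Relay pattern (it is not shown in this excerpt),
+# and the ID is a hypothetical placeholder.
+#
+# query {
+#   node(id: "I_kwDOAbCdE12345") {
+#     ... on Issue { title }
+#   }
+# }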
+
+"""
+The possible values for the notification restriction setting.
+"""
+enum NotificationRestrictionSettingValue {
+  """
+  The setting is disabled for the owner.
+  """
+  DISABLED
+
+  """
+  The setting is enabled for the owner.
+  """
+  ENABLED
+}
+
+"""
+An OIDC identity provider configured to provision identities for an enterprise.
+Visible to enterprise owners or enterprise owners' personal access tokens
+(classic) with read:enterprise or admin:enterprise scope.
+"""
+type OIDCProvider implements Node {
+  """
+  The enterprise this identity provider belongs to.
+  """
+  enterprise: Enterprise
+
+  """
+  ExternalIdentities provisioned by this identity provider.
+  """
+  externalIdentities(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter to external identities with the user's login
+    """
+    login: String
+
+    """
+    Filter to external identities with valid org membership only
+    """
+    membersOnly: Boolean
+
+    """
+    Filter to external identities with the user's userName/NameID attribute
+    """
+    userName: String
+  ): ExternalIdentityConnection!
+
+  """
+  The Node ID of the OIDCProvider object
+  """
+  id: ID!
+
+  """
+  The OIDC identity provider type
+  """
+  providerType: OIDCProviderType!
+
+  """
+  The id of the tenant this provider is attached to
+  """
+  tenantId: String!
+}
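+
+# Comment-only sketch (not part of the schema): paging through
+# externalIdentities with the cursor arguments above. The `pageInfo`
+# selection assumes the usual connection shape, which is not shown in
+# this excerpt.
+#
+# query ($providerId: ID!, $cursor: String) {
+#   node(id: $providerId) {
+#     ... on OIDCProvider {
+#       externalIdentities(first: 50, after: $cursor, membersOnly: true) {
+#         pageInfo { hasNextPage endCursor }
+#       }
+#     }
+#   }
+# }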
+
+"""
+The OIDC identity provider type
+"""
+enum OIDCProviderType {
+  """
+  Azure Active Directory
+  """
+  AAD
+}
+
+"""
+Metadata for an audit entry with action oauth_application.*
+"""
+interface OauthApplicationAuditEntryData {
+  """
+  The name of the OAuth application.
+  """
+  oauthApplicationName: String
+
+  """
+  The HTTP path for the OAuth application
+  """
+  oauthApplicationResourcePath: URI
+
+  """
+  The HTTP URL for the OAuth application
+  """
+  oauthApplicationUrl: URI
+}
+
+"""
+Audit log entry for an oauth_application.create event.
+"""
+type OauthApplicationCreateAuditEntry implements AuditEntry & Node & OauthApplicationAuditEntryData & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The application URL of the OAuth application.
+  """
+  applicationUrl: URI
+
+  """
+  The callback URL of the OAuth application.
+  """
+  callbackUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OauthApplicationCreateAuditEntry object
+  """
+  id: ID!
+
+  """
+  The name of the OAuth application.
+  """
+  oauthApplicationName: String
+
+  """
+  The HTTP path for the OAuth application
+  """
+  oauthApplicationResourcePath: URI
+
+  """
+  The HTTP URL for the OAuth application
+  """
+  oauthApplicationUrl: URI
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The rate limit of the OAuth application.
+  """
+  rateLimit: Int
+
+  """
+  The state of the OAuth application.
+  """
+  state: OauthApplicationCreateAuditEntryState
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
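+
+# Comment-only sketch (not part of the schema): audit entry types such as
+# the one above are typically read through an audit-log connection with
+# inline fragments. The `organization`/`auditLog` fields and the `nodes`
+# selection are assumptions from the public API, not shown in this
+# excerpt.
+#
+# query {
+#   organization(login: "acme") {
+#     auditLog(first: 10) {
+#       nodes {
+#         ... on OauthApplicationCreateAuditEntry {
+#           action
+#           actorLogin
+#           oauthApplicationName
+#           createdAt
+#         }
+#       }
+#     }
+#   }
+# }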
+
+"""
+The state of an OAuth application when it was created.
+"""
+enum OauthApplicationCreateAuditEntryState {
+  """
+  The OAuth application was active and allowed to have OAuth Accesses.
+  """
+  ACTIVE
+
+  """
+  The OAuth application was in the process of being deleted.
+  """
+  PENDING_DELETION
+
+  """
+  The OAuth application was suspended from generating OAuth Accesses due to abuse or security concerns.
+  """
+  SUSPENDED
+}
+
+"""
+The corresponding operation type for the action
+"""
+enum OperationType {
+  """
+  An existing resource was accessed
+  """
+  ACCESS
+
+  """
+  A resource performed an authentication event
+  """
+  AUTHENTICATION
+
+  """
+  A new resource was created
+  """
+  CREATE
+
+  """
+  An existing resource was modified
+  """
+  MODIFY
+
+  """
+  An existing resource was removed
+  """
+  REMOVE
+
+  """
+  An existing resource was restored
+  """
+  RESTORE
+
+  """
+  An existing resource was transferred between multiple resources
+  """
+  TRANSFER
+}
+
+"""
+Possible directions in which to order a list of items when provided an `orderBy` argument.
+"""
+enum OrderDirection {
+  """
+  Specifies an ascending order for a given `orderBy` argument.
+  """
+  ASC
+
+  """
+  Specifies a descending order for a given `orderBy` argument.
+  """
+  DESC
+}
+
+"""
+Audit log entry for an org.add_billing_manager event.
+"""
+type OrgAddBillingManagerAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgAddBillingManagerAuditEntry object
+  """
+  id: ID!
+
+  """
+  The email address used to invite a billing manager for the organization.
+  """
+  invitationEmail: String
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for an org.add_member event.
+"""
+type OrgAddMemberAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgAddMemberAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The permission level of the member added to the organization.
+  """
+  permission: OrgAddMemberAuditEntryPermission
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+The permissions available to members on an Organization.
+"""
+enum OrgAddMemberAuditEntryPermission {
+  """
+  Can read, clone, push, and add collaborators to repositories.
+  """
+  ADMIN
+
+  """
+  Can read and clone repositories.
+  """
+  READ
+}
+
+"""
+Audit log entry for an org.block_user event.
+"""
+type OrgBlockUserAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The blocked user.
+  """
+  blockedUser: User
+
+  """
+  The username of the blocked user.
+  """
+  blockedUserName: String
+
+  """
+  The HTTP path for the blocked user.
+  """
+  blockedUserResourcePath: URI
+
+  """
+  The HTTP URL for the blocked user.
+  """
+  blockedUserUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgBlockUserAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for an org.config.disable_collaborators_only event.
+"""
+type OrgConfigDisableCollaboratorsOnlyAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgConfigDisableCollaboratorsOnlyAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for an org.config.enable_collaborators_only event.
+"""
+type OrgConfigEnableCollaboratorsOnlyAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgConfigEnableCollaboratorsOnlyAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for an org.create event.
+"""
+type OrgCreateAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The billing plan for the Organization.
+  """
+  billingPlan: OrgCreateAuditEntryBillingPlan
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgCreateAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+The billing plans available for organizations.
+"""
+enum OrgCreateAuditEntryBillingPlan {
+  """
+  Team Plan
+  """
+  BUSINESS
+
+  """
+  Enterprise Cloud Plan
+  """
+  BUSINESS_PLUS
+
+  """
+  Free Plan
+  """
+  FREE
+
+  """
+  Tiered Per Seat Plan
+  """
+  TIERED_PER_SEAT
+
+  """
+  Legacy Unlimited Plan
+  """
+  UNLIMITED
+}
+
+"""
+Audit log entry for an org.disable_oauth_app_restrictions event.
+"""
+type OrgDisableOauthAppRestrictionsAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgDisableOauthAppRestrictionsAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for an org.disable_saml event.
+"""
+type OrgDisableSamlAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The SAML provider's digest algorithm URL.
+  """
+  digestMethodUrl: URI
+
+  """
+  The Node ID of the OrgDisableSamlAuditEntry object
+  """
+  id: ID!
+
+  """
+  The SAML provider's issuer URL.
+  """
+  issuerUrl: URI
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The SAML provider's signature algorithm URL.
+  """
+  signatureMethodUrl: URI
+
+  """
+  The SAML provider's single sign-on URL.
+  """
+  singleSignOnUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for an org.disable_two_factor_requirement event.
+"""
+type OrgDisableTwoFactorRequirementAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgDisableTwoFactorRequirementAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for an org.enable_oauth_app_restrictions event.
+"""
+type OrgEnableOauthAppRestrictionsAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgEnableOauthAppRestrictionsAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for an org.enable_saml event.
+"""
+type OrgEnableSamlAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The SAML provider's digest algorithm URL.
+  """
+  digestMethodUrl: URI
+
+  """
+  The Node ID of the OrgEnableSamlAuditEntry object
+  """
+  id: ID!
+
+  """
+  The SAML provider's issuer URL.
+  """
+  issuerUrl: URI
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The SAML provider's signature algorithm URL.
+  """
+  signatureMethodUrl: URI
+
+  """
+  The SAML provider's single sign-on URL.
+  """
+  singleSignOnUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for an org.enable_two_factor_requirement event.
+"""
+type OrgEnableTwoFactorRequirementAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgEnableTwoFactorRequirementAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Ordering options for an organization's enterprise owner connections.
+"""
+input OrgEnterpriseOwnerOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order enterprise owners by.
+  """
+  field: OrgEnterpriseOwnerOrderField!
+}
+
+"""
+Properties by which enterprise owners can be ordered.
+"""
+enum OrgEnterpriseOwnerOrderField {
+  """
+  Order enterprise owners by login.
+  """
+  LOGIN
+}
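+
+# Illustrative usage, as a comment only: OrgEnterpriseOwnerOrder feeds the
+# `orderBy` argument of Organization.enterpriseOwners (defined later in this
+# file). The login and the `nodes { login }` connection shape are assumptions
+# for the sketch, following the connection pattern used elsewhere here.
+#
+# {
+#   organization(login: "example-org") {
+#     enterpriseOwners(first: 5, orderBy: {field: LOGIN, direction: ASC}) {
+#       nodes {
+#         login
+#       }
+#     }
+#   }
+# }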
+
+"""
+Audit log entry for an org.invite_member event.
+"""
+type OrgInviteMemberAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The email address of the organization invitation.
+  """
+  email: String
+
+  """
+  The Node ID of the OrgInviteMemberAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The organization invitation.
+  """
+  organizationInvitation: OrganizationInvitation
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for an org.invite_to_business event.
+"""
+type OrgInviteToBusinessAuditEntry implements AuditEntry & EnterpriseAuditEntryData & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The HTTP path for this enterprise.
+  """
+  enterpriseResourcePath: URI
+
+  """
+  The slug of the enterprise.
+  """
+  enterpriseSlug: String
+
+  """
+  The HTTP URL for this enterprise.
+  """
+  enterpriseUrl: URI
+
+  """
+  The Node ID of the OrgInviteToBusinessAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for an org.oauth_app_access_approved event.
+"""
+type OrgOauthAppAccessApprovedAuditEntry implements AuditEntry & Node & OauthApplicationAuditEntryData & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgOauthAppAccessApprovedAuditEntry object
+  """
+  id: ID!
+
+  """
+  The name of the OAuth application.
+  """
+  oauthApplicationName: String
+
+  """
+  The HTTP path for the OAuth application
+  """
+  oauthApplicationResourcePath: URI
+
+  """
+  The HTTP URL for the OAuth application
+  """
+  oauthApplicationUrl: URI
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for an org.oauth_app_access_blocked event.
+"""
+type OrgOauthAppAccessBlockedAuditEntry implements AuditEntry & Node & OauthApplicationAuditEntryData & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgOauthAppAccessBlockedAuditEntry object
+  """
+  id: ID!
+
+  """
+  The name of the OAuth application.
+  """
+  oauthApplicationName: String
+
+  """
+  The HTTP path for the OAuth application
+  """
+  oauthApplicationResourcePath: URI
+
+  """
+  The HTTP URL for the OAuth application
+  """
+  oauthApplicationUrl: URI
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for an org.oauth_app_access_denied event.
+"""
+type OrgOauthAppAccessDeniedAuditEntry implements AuditEntry & Node & OauthApplicationAuditEntryData & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgOauthAppAccessDeniedAuditEntry object
+  """
+  id: ID!
+
+  """
+  The name of the OAuth application.
+  """
+  oauthApplicationName: String
+
+  """
+  The HTTP path for the OAuth application
+  """
+  oauthApplicationResourcePath: URI
+
+  """
+  The HTTP URL for the OAuth application
+  """
+  oauthApplicationUrl: URI
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for an org.oauth_app_access_requested event.
+"""
+type OrgOauthAppAccessRequestedAuditEntry implements AuditEntry & Node & OauthApplicationAuditEntryData & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgOauthAppAccessRequestedAuditEntry object
+  """
+  id: ID!
+
+  """
+  The name of the OAuth application.
+  """
+  oauthApplicationName: String
+
+  """
+  The HTTP path for the OAuth application
+  """
+  oauthApplicationResourcePath: URI
+
+  """
+  The HTTP URL for the OAuth application
+  """
+  oauthApplicationUrl: URI
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for an org.oauth_app_access_unblocked event.
+"""
+type OrgOauthAppAccessUnblockedAuditEntry implements AuditEntry & Node & OauthApplicationAuditEntryData & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgOauthAppAccessUnblockedAuditEntry object
+  """
+  id: ID!
+
+  """
+  The name of the OAuth application.
+  """
+  oauthApplicationName: String
+
+  """
+  The HTTP path for the OAuth application
+  """
+  oauthApplicationResourcePath: URI
+
+  """
+  The HTTP URL for the OAuth application
+  """
+  oauthApplicationUrl: URI
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for an org.remove_billing_manager event.
+"""
+type OrgRemoveBillingManagerAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgRemoveBillingManagerAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The reason for the billing manager being removed.
+  """
+  reason: OrgRemoveBillingManagerAuditEntryReason
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+The reason a billing manager was removed from an Organization.
+"""
+enum OrgRemoveBillingManagerAuditEntryReason {
+  """
+  SAML external identity missing
+  """
+  SAML_EXTERNAL_IDENTITY_MISSING
+
+  """
+  SAML SSO enforcement requires an external identity
+  """
+  SAML_SSO_ENFORCEMENT_REQUIRES_EXTERNAL_IDENTITY
+
+  """
+  The organization required 2FA of its billing managers and this user did not have 2FA enabled.
+  """
+  TWO_FACTOR_REQUIREMENT_NON_COMPLIANCE
+}
+
+"""
+Audit log entry for an org.remove_member event.
+"""
+type OrgRemoveMemberAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgRemoveMemberAuditEntry object
+  """
+  id: ID!
+
+  """
+  The types of membership the member has with the organization.
+  """
+  membershipTypes: [OrgRemoveMemberAuditEntryMembershipType!]
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The reason for the member being removed.
+  """
+  reason: OrgRemoveMemberAuditEntryReason
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+The type of membership a user has with an Organization.
+"""
+enum OrgRemoveMemberAuditEntryMembershipType {
+  """
+  Organization owners have full access and can change several settings,
+  including the names of repositories that belong to the Organization and the
+  membership of the Owners team. In addition, organization owners can delete
+  the organization and all of its repositories.
+  """
+  ADMIN
+
+  """
+  A billing manager is a user who manages the billing settings for the Organization, such as updating payment information.
+  """
+  BILLING_MANAGER
+
+  """
+  A direct member is a user that is a member of the Organization.
+  """
+  DIRECT_MEMBER
+
+  """
+  An outside collaborator is a person who isn't explicitly a member of the
+  Organization, but who has Read, Write, or Admin permissions to one or more
+  repositories in the organization.
+  """
+  OUTSIDE_COLLABORATOR
+
+  """
+  A suspended member.
+  """
+  SUSPENDED
+
+  """
+  An unaffiliated collaborator is a person who is not a member of the
+  Organization and does not have access to any repositories in the Organization.
+  """
+  UNAFFILIATED
+}
+
+"""
+The reason a member was removed from an Organization.
+"""
+enum OrgRemoveMemberAuditEntryReason {
+  """
+  SAML external identity missing
+  """
+  SAML_EXTERNAL_IDENTITY_MISSING
+
+  """
+  SAML SSO enforcement requires an external identity
+  """
+  SAML_SSO_ENFORCEMENT_REQUIRES_EXTERNAL_IDENTITY
+
+  """
+  User was removed from the organization during account recovery
+  """
+  TWO_FACTOR_ACCOUNT_RECOVERY
+
+  """
+  The organization required 2FA of its members and this user did not have 2FA enabled.
+  """
+  TWO_FACTOR_REQUIREMENT_NON_COMPLIANCE
+
+  """
+  User account has been deleted
+  """
+  USER_ACCOUNT_DELETED
+}
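+
+# Illustrative inline fragment, as a comment only: reading why and in what
+# capacity a member was removed. The field names come from
+# OrgRemoveMemberAuditEntry above; the surrounding audit-log selection is
+# assumed.
+#
+# ... on OrgRemoveMemberAuditEntry {
+#   userLogin
+#   membershipTypes
+#   reason
+#   createdAt
+# }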
+
+"""
+Audit log entry for an org.remove_outside_collaborator event.
+"""
+type OrgRemoveOutsideCollaboratorAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgRemoveOutsideCollaboratorAuditEntry object
+  """
+  id: ID!
+
+  """
+  The types of membership the outside collaborator has with the organization.
+  """
+  membershipTypes: [OrgRemoveOutsideCollaboratorAuditEntryMembershipType!]
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The reason for the outside collaborator being removed from the Organization.
+  """
+  reason: OrgRemoveOutsideCollaboratorAuditEntryReason
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+The type of membership a user has with an Organization.
+"""
+enum OrgRemoveOutsideCollaboratorAuditEntryMembershipType {
+  """
+  A billing manager is a user who manages the billing settings for the Organization, such as updating payment information.
+  """
+  BILLING_MANAGER
+
+  """
+  An outside collaborator is a person who isn't explicitly a member of the
+  Organization, but who has Read, Write, or Admin permissions to one or more
+  repositories in the organization.
+  """
+  OUTSIDE_COLLABORATOR
+
+  """
+  An unaffiliated collaborator is a person who is not a member of the
+  Organization and does not have access to any repositories in the organization.
+  """
+  UNAFFILIATED
+}
+
+"""
+The reason an outside collaborator was removed from an Organization.
+"""
+enum OrgRemoveOutsideCollaboratorAuditEntryReason {
+  """
+  SAML external identity missing
+  """
+  SAML_EXTERNAL_IDENTITY_MISSING
+
+  """
+  The organization required 2FA of its outside collaborators and this user did not have 2FA enabled.
+  """
+  TWO_FACTOR_REQUIREMENT_NON_COMPLIANCE
+}
+
+"""
+Audit log entry for an org.restore_member event.
+"""
+type OrgRestoreMemberAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgRestoreMemberAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The number of custom email routings for the restored member.
+  """
+  restoredCustomEmailRoutingsCount: Int
+
+  """
+  The number of issue assignments for the restored member.
+  """
+  restoredIssueAssignmentsCount: Int
+
+  """
+  Restored organization membership objects.
+  """
+  restoredMemberships: [OrgRestoreMemberAuditEntryMembership!]
+
+  """
+  The number of restored memberships.
+  """
+  restoredMembershipsCount: Int
+
+  """
+  The number of repositories of the restored member.
+  """
+  restoredRepositoriesCount: Int
+
+  """
+  The number of starred repositories for the restored member.
+  """
+  restoredRepositoryStarsCount: Int
+
+  """
+  The number of watched repositories for the restored member.
+  """
+  restoredRepositoryWatchesCount: Int
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Types of memberships that can be restored for an Organization member.
+"""
+union OrgRestoreMemberAuditEntryMembership =
+    OrgRestoreMemberMembershipOrganizationAuditEntryData
+  | OrgRestoreMemberMembershipRepositoryAuditEntryData
+  | OrgRestoreMemberMembershipTeamAuditEntryData
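+
+# Illustrative selection, as a comment only: restoredMemberships returns this
+# union, so clients discriminate the three membership kinds with inline
+# fragments on the member types defined below.
+#
+# ... on OrgRestoreMemberAuditEntry {
+#   restoredMemberships {
+#     ... on OrgRestoreMemberMembershipOrganizationAuditEntryData { organizationName }
+#     ... on OrgRestoreMemberMembershipRepositoryAuditEntryData { repositoryName }
+#     ... on OrgRestoreMemberMembershipTeamAuditEntryData { teamName }
+#   }
+# }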
+
+"""
+Metadata for an organization membership for org.restore_member actions
+"""
+type OrgRestoreMemberMembershipOrganizationAuditEntryData implements OrganizationAuditEntryData {
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+}
+
+"""
+Metadata for a repository membership for org.restore_member actions
+"""
+type OrgRestoreMemberMembershipRepositoryAuditEntryData implements RepositoryAuditEntryData {
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+}
+
+"""
+Metadata for a team membership for org.restore_member actions
+"""
+type OrgRestoreMemberMembershipTeamAuditEntryData implements TeamAuditEntryData {
+  """
+  The team associated with the action
+  """
+  team: Team
+
+  """
+  The name of the team
+  """
+  teamName: String
+
+  """
+  The HTTP path for this team
+  """
+  teamResourcePath: URI
+
+  """
+  The HTTP URL for this team
+  """
+  teamUrl: URI
+}
+
+"""
+Audit log entry for an org.unblock_user event.
+"""
+type OrgUnblockUserAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The user being unblocked by the organization.
+  """
+  blockedUser: User
+
+  """
+  The username of the blocked user.
+  """
+  blockedUserName: String
+
+  """
+  The HTTP path for the blocked user.
+  """
+  blockedUserResourcePath: URI
+
+  """
+  The HTTP URL for the blocked user.
+  """
+  blockedUserUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgUnblockUserAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for an org.update_default_repository_permission event.
+"""
+type OrgUpdateDefaultRepositoryPermissionAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgUpdateDefaultRepositoryPermissionAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The new base repository permission level for the organization.
+  """
+  permission: OrgUpdateDefaultRepositoryPermissionAuditEntryPermission
+
+  """
+  The former base repository permission level for the organization.
+  """
+  permissionWas: OrgUpdateDefaultRepositoryPermissionAuditEntryPermission
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+The default permission a repository can have in an Organization.
+"""
+enum OrgUpdateDefaultRepositoryPermissionAuditEntryPermission {
+  """
+  Can read, clone, push, and add collaborators to repositories.
+  """
+  ADMIN
+
+  """
+  No default permission value.
+  """
+  NONE
+
+  """
+  Can read and clone repositories.
+  """
+  READ
+
+  """
+  Can read, clone and push to repositories.
+  """
+  WRITE
+}
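+
+# Reading the permission/permissionWas pair above, as a comment-only example:
+# an OrgUpdateDefaultRepositoryPermissionAuditEntry with permissionWas: READ
+# and permission: NONE records the organization's base repository permission
+# being lowered from read access to no default access.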
+
+"""
+Audit log entry for an org.update_member event.
+"""
+type OrgUpdateMemberAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgUpdateMemberAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The new member permission level for the organization.
+  """
+  permission: OrgUpdateMemberAuditEntryPermission
+
+  """
+  The former member permission level for the organization.
+  """
+  permissionWas: OrgUpdateMemberAuditEntryPermission
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+The permissions available to members on an Organization.
+"""
+enum OrgUpdateMemberAuditEntryPermission {
+  """
+  Can read, clone, push, and add collaborators to repositories.
+  """
+  ADMIN
+
+  """
+  Can read and clone repositories.
+  """
+  READ
+}
+
+"""
+Audit log entry for an org.update_member_repository_creation_permission event.
+"""
+type OrgUpdateMemberRepositoryCreationPermissionAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  Whether members can create repositories in the organization.
+  """
+  canCreateRepositories: Boolean
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgUpdateMemberRepositoryCreationPermissionAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+
+  """
+  The permission for the visibility level of repositories created in this organization.
+  """
+  visibility: OrgUpdateMemberRepositoryCreationPermissionAuditEntryVisibility
+}
+
+"""
+The permissions available for repository creation on an Organization.
+"""
+enum OrgUpdateMemberRepositoryCreationPermissionAuditEntryVisibility {
+  """
+  All organization members are restricted from creating any repositories.
+  """
+  ALL
+
+  """
+  All organization members are restricted from creating internal repositories.
+  """
+  INTERNAL
+
+  """
+  All organization members are allowed to create any repositories.
+  """
+  NONE
+
+  """
+  All organization members are restricted from creating private repositories.
+  """
+  PRIVATE
+
+  """
+  All organization members are restricted from creating private or internal repositories.
+  """
+  PRIVATE_INTERNAL
+
+  """
+  All organization members are restricted from creating public repositories.
+  """
+  PUBLIC
+
+  """
+  All organization members are restricted from creating public or internal repositories.
+  """
+  PUBLIC_INTERNAL
+
+  """
+  All organization members are restricted from creating public or private repositories.
+  """
+  PUBLIC_PRIVATE
+}
+
+"""
+Audit log entry for an org.update_member_repository_invitation_permission event.
+"""
+type OrgUpdateMemberRepositoryInvitationPermissionAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  Whether outside collaborators can be invited to repositories in the organization.
+  """
+  canInviteOutsideCollaboratorsToRepositories: Boolean
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the OrgUpdateMemberRepositoryInvitationPermissionAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+An account on GitHub, with one or more owners, that has repositories, members and teams.
+"""
+type Organization implements Actor & AnnouncementBanner & MemberStatusable & Node & PackageOwner & ProfileOwner & ProjectOwner & ProjectV2Owner & ProjectV2Recent & RepositoryDiscussionAuthor & RepositoryDiscussionCommentAuthor & RepositoryOwner & Sponsorable & UniformResourceLocatable {
+  """
+  The text of the announcement
+  """
+  announcement: String
+
+  """
+  The expiration date of the announcement, if any
+  """
+  announcementExpiresAt: DateTime
+
+  """
+  Whether the announcement can be dismissed by the user
+  """
+  announcementUserDismissible: Boolean
+
+  """
+  Determine if this repository owner has any items that can be pinned to their profile.
+  """
+  anyPinnableItems(
+    """
+    Filter to only a particular kind of pinnable item.
+    """
+    type: PinnableItemType
+  ): Boolean!
+
+  """
+  Identifies the date and time when the organization was archived.
+  """
+  archivedAt: DateTime
+
+  """
+  Audit log entries of the organization
+  """
+  auditLog(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the returned audit log entries.
+    """
+    orderBy: AuditLogOrder = {field: CREATED_AT, direction: DESC}
+
+    """
+    The query string to filter audit entries
+    """
+    query: String
+  ): OrganizationAuditEntryConnection!
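+
+  # Illustrative pagination, as a comment only: auditLog follows the Relay
+  # connection pattern, so later pages are fetched by passing the previous
+  # page's end cursor. `pageInfo { endCursor hasNextPage }` and `nodes` are
+  # assumed from the standard connection types used elsewhere in this schema;
+  # the login and cursor value are placeholders.
+  #
+  # {
+  #   organization(login: "example-org") {
+  #     auditLog(first: 100, after: "PREVIOUS_END_CURSOR") {
+  #       pageInfo { endCursor hasNextPage }
+  #       nodes { ... on AuditEntry { action actorLogin createdAt } }
+  #     }
+  #   }
+  # }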
+
+  """
+  A URL pointing to the organization's public avatar.
+  """
+  avatarUrl(
+    """
+    The size of the resulting square image.
+    """
+    size: Int
+  ): URI!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The organization's public profile description.
+  """
+  description: String
+
+  """
+  The organization's public profile description rendered to HTML.
+  """
+  descriptionHTML: String
+
+  """
+  A list of domains owned by the organization.
+  """
+  domains(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Filter by whether the domain is approved.
+    """
+    isApproved: Boolean = null
+
+    """
+    Filter by if the domain is verified.
+    """
+    isVerified: Boolean = null
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for verifiable domains returned.
+    """
+    orderBy: VerifiableDomainOrder = {field: DOMAIN, direction: ASC}
+  ): VerifiableDomainConnection
+
+  """
+  The organization's public email.
+  """
+  email: String
+
+  """
+  A list of owners of the organization's enterprise account.
+  """
+  enterpriseOwners(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for enterprise owners returned from the connection.
+    """
+    orderBy: OrgEnterpriseOwnerOrder = {field: LOGIN, direction: ASC}
+
+    """
+    The organization role to filter by.
+    """
+    organizationRole: RoleInOrganization
+
+    """
+    The search string to look for.
+    """
+    query: String
+  ): OrganizationEnterpriseOwnerConnection!
+
+  """
+  The estimated next GitHub Sponsors payout for this user/organization in cents (USD).
+  """
+  estimatedNextSponsorsPayoutInCents: Int!
+
+  """
+  True if this user/organization has a GitHub Sponsors listing.
+  """
+  hasSponsorsListing: Boolean!
+
+  """
+  The Node ID of the Organization object
+  """
+  id: ID!
+
+  """
+  The interaction ability settings for this organization.
+  """
+  interactionAbility: RepositoryInteractionAbility
+
+  """
+  The setting value for whether the organization has an IP allow list enabled.
+  """
+  ipAllowListEnabledSetting: IpAllowListEnabledSettingValue!
+
+  """
+  The IP addresses that are allowed to access resources owned by the organization.
+  """
+  ipAllowListEntries(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for IP allow list entries returned.
+    """
+    orderBy: IpAllowListEntryOrder = {field: ALLOW_LIST_VALUE, direction: ASC}
+  ): IpAllowListEntryConnection!
+
+  """
+  The setting value for whether the organization has IP allow list configuration for installed GitHub Apps enabled.
+  """
+  ipAllowListForInstalledAppsEnabledSetting: IpAllowListForInstalledAppsEnabledSettingValue!
+
+  """
+  Whether the given account is sponsoring this user/organization.
+  """
+  isSponsoredBy(
+    """
+    The target account's login.
+    """
+    accountLogin: String!
+  ): Boolean!
+
+  """
+  True if the viewer is sponsored by this user/organization.
+  """
+  isSponsoringViewer: Boolean!
+
+  """
+  Whether the organization has verified its profile email and website.
+  """
+  isVerified: Boolean!
+
+  """
+  Showcases a selection of repositories and gists that the profile owner has
+  curated or that have been selected automatically based on popularity.
+  """
+  itemShowcase: ProfileItemShowcase!
+
+  """
+  The organization's public profile location.
+  """
+  location: String
+
+  """
+  The organization's login name.
+  """
+  login: String!
+
+  """
+  A list of all mannequins for this organization.
+  """
+  mannequins(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter mannequins by login.
+    """
+    login: String
+
+    """
+    Ordering options for mannequins returned from the connection.
+    """
+    orderBy: MannequinOrder = {field: CREATED_AT, direction: ASC}
+  ): MannequinConnection!
+
+  """
+  Get the status messages members of this entity have set that are either public or visible only to the organization.
+  """
+  memberStatuses(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for user statuses returned from the connection.
+    """
+    orderBy: UserStatusOrder = {field: UPDATED_AT, direction: DESC}
+  ): UserStatusConnection!
+
+  """
+  Members can fork private repositories in this organization
+  """
+  membersCanForkPrivateRepositories: Boolean!
+
+  """
+  A list of users who are members of this organization.
+  """
+  membersWithRole(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): OrganizationMemberConnection!
+
+  """
+  The estimated monthly GitHub Sponsors income for this user/organization in cents (USD).
+  """
+  monthlyEstimatedSponsorsIncomeInCents: Int!
+
+  """
+  The organization's public profile name.
+  """
+  name: String
+
+  """
+  The HTTP path for creating a new team
+  """
+  newTeamResourcePath: URI!
+
+  """
+  The HTTP URL for creating a new team
+  """
+  newTeamUrl: URI!
+
+  """
+  Indicates if email notification delivery for this organization is restricted to verified or approved domains.
+  """
+  notificationDeliveryRestrictionEnabledSetting: NotificationRestrictionSettingValue!
+
+  """
+  The billing email for the organization.
+  """
+  organizationBillingEmail: String
+
+  """
+  A list of packages under the owner.
+  """
+  packages(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Find packages by their names.
+    """
+    names: [String]
+
+    """
+    Ordering of the returned packages.
+    """
+    orderBy: PackageOrder = {field: CREATED_AT, direction: DESC}
+
+    """
+    Filter registry package by type.
+    """
+    packageType: PackageType
+
+    """
+    Find packages in a repository by ID.
+    """
+    repositoryId: ID
+  ): PackageConnection!
+
+  """
+  A list of users who have been invited to join this organization.
+  """
+  pendingMembers(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserConnection!
+
+  """
+  A list of repositories and gists this profile owner can pin to their profile.
+  """
+  pinnableItems(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter the types of pinnable items that are returned.
+    """
+    types: [PinnableItemType!]
+  ): PinnableItemConnection!
+
+  """
+  A list of repositories and gists this profile owner has pinned to their profile
+  """
+  pinnedItems(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter the types of pinned items that are returned.
+    """
+    types: [PinnableItemType!]
+  ): PinnableItemConnection!
+
+  """
+  Returns how many more items this profile owner can pin to their profile.
+  """
+  pinnedItemsRemaining: Int!
+
+  """
+  Find project by number.
+  """
+  project(
+    """
+    The project number to find.
+    """
+    number: Int!
+  ): Project
+
+  """
+  Find a project by number.
+  """
+  projectV2(
+    """
+    The project number.
+    """
+    number: Int!
+  ): ProjectV2
+
+  """
+  A list of projects under the owner.
+  """
+  projects(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for projects returned from the connection
+    """
+    orderBy: ProjectOrder
+
+    """
+    Query to search projects by; currently only the project name is searched.
+    """
+    search: String
+
+    """
+    A list of states to filter the projects by.
+    """
+    states: [ProjectState!]
+  ): ProjectConnection!
+
+  """
+  The HTTP path listing the organization's projects
+  """
+  projectsResourcePath: URI!
+
+  """
+  The HTTP URL listing the organization's projects
+  """
+  projectsUrl: URI!
+
+  """
+  A list of projects under the owner.
+  """
+  projectsV2(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    How to order the returned projects.
+    """
+    orderBy: ProjectV2Order = {field: NUMBER, direction: DESC}
+
+    """
+    A project to search for under the owner.
+    """
+    query: String
+  ): ProjectV2Connection!
+
+  """
+  Recent projects that this user has modified in the context of the owner.
+  """
+  recentProjects(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectV2Connection!
+
+  """
+  A list of repositories that the user owns.
+  """
+  repositories(
+    """
+    Array of viewer's affiliation options for repositories returned from the
+    connection. For example, OWNER will include only repositories that the
+    current viewer owns.
+    """
+    affiliations: [RepositoryAffiliation]
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    If non-null, filters repositories according to whether they have issues enabled
+    """
+    hasIssuesEnabled: Boolean
+
+    """
+    If non-null, filters repositories according to whether they are archived and not maintained
+    """
+    isArchived: Boolean
+
+    """
+    If non-null, filters repositories according to whether they are forks of another repository
+    """
+    isFork: Boolean
+
+    """
+    If non-null, filters repositories according to whether they have been locked
+    """
+    isLocked: Boolean
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for repositories returned from the connection
+    """
+    orderBy: RepositoryOrder
+
+    """
+    Array of owner's affiliation options for repositories returned from the
+    connection. For example, OWNER will include only repositories that the
+    organization or user being viewed owns.
+    """
+    ownerAffiliations: [RepositoryAffiliation] = [OWNER, COLLABORATOR]
+
+    """
+    If non-null, filters repositories according to privacy
+    """
+    privacy: RepositoryPrivacy
+  ): RepositoryConnection!
+
+  """
+  Find Repository.
+  """
+  repository(
+    """
+    Follow repository renames. If disabled, a repository referenced by its old name will return an error.
+    """
+    followRenames: Boolean = true
+
+    """
+    Name of Repository to find.
+    """
+    name: String!
+  ): Repository
+
+  """
+  Discussion comments this user has authored.
+  """
+  repositoryDiscussionComments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter discussion comments to only those that were marked as the answer
+    """
+    onlyAnswers: Boolean = false
+
+    """
+    Filter discussion comments to only those in a specific repository.
+    """
+    repositoryId: ID
+  ): DiscussionCommentConnection!
+
+  """
+  Discussions this user has started.
+  """
+  repositoryDiscussions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Filter discussions to only answered or only unanswered ones. Defaults to
+    including both answered and unanswered discussions.
+    """
+    answered: Boolean = null
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for discussions returned from the connection.
+    """
+    orderBy: DiscussionOrder = {field: CREATED_AT, direction: DESC}
+
+    """
+    Filter discussions to only those in a specific repository.
+    """
+    repositoryId: ID
+
+    """
+    A list of states to filter the discussions by.
+    """
+    states: [DiscussionState!] = []
+  ): DiscussionConnection!
+
+  """
+  A list of all repository migrations for this organization.
+  """
+  repositoryMigrations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for repository migrations returned.
+    """
+    orderBy: RepositoryMigrationOrder = {field: CREATED_AT, direction: ASC}
+
+    """
+    Filter repository migrations by repository name.
+    """
+    repositoryName: String
+
+    """
+    Filter repository migrations by state.
+    """
+    state: MigrationState
+  ): RepositoryMigrationConnection!
+
+  """
+  When true, the organization requires all members, billing managers, and outside
+  collaborators to enable two-factor authentication.
+  """
+  requiresTwoFactorAuthentication: Boolean
+
+  """
+  The HTTP path for this organization.
+  """
+  resourcePath: URI!
+
+  """
+  Returns a single ruleset from the current organization by ID.
+  """
+  ruleset(
+    """
+    The ID of the ruleset to be returned.
+    """
+    databaseId: Int!
+  ): RepositoryRuleset
+
+  """
+  A list of rulesets for this organization.
+  """
+  rulesets(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Return rulesets configured at higher levels that apply to this organization
+    """
+    includeParents: Boolean = true
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): RepositoryRulesetConnection
+
+  """
+  The Organization's SAML identity provider. Visible to (1) organization owners,
+  (2) organization owners' personal access tokens (classic) with read:org or
+  admin:org scope, (3) GitHub App with an installation token with read or write
+  access to members.
+  """
+  samlIdentityProvider: OrganizationIdentityProvider
+
+  """
+  List of users and organizations this entity is sponsoring.
+  """
+  sponsoring(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the users and organizations returned from the connection.
+    """
+    orderBy: SponsorOrder = {field: RELEVANCE, direction: DESC}
+  ): SponsorConnection!
+
+  """
+  List of sponsors for this user or organization.
+  """
+  sponsors(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for sponsors returned from the connection.
+    """
+    orderBy: SponsorOrder = {field: RELEVANCE, direction: DESC}
+
+    """
+    If given, will filter for sponsors at the given tier. Will only return
+    sponsors whose tier the viewer is permitted to see.
+    """
+    tierId: ID
+  ): SponsorConnection!
+
+  """
+  Events involving this sponsorable, such as new sponsorships.
+  """
+  sponsorsActivities(
+    """
+    Filter activities to only the specified actions.
+    """
+    actions: [SponsorsActivityAction!] = []
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Whether to include those events where this sponsorable acted as the sponsor.
+    Defaults to only including events where this sponsorable was the recipient
+    of a sponsorship.
+    """
+    includeAsSponsor: Boolean = false
+
+    """
+    Whether or not to include private activities in the result set. Defaults to including public and private activities.
+    """
+    includePrivate: Boolean = true
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for activity returned from the connection.
+    """
+    orderBy: SponsorsActivityOrder = {field: TIMESTAMP, direction: DESC}
+
+    """
+    Filter activities returned to only those that occurred in the most recent
+    specified time period. Set to ALL to avoid filtering by when the activity
+    occurred. Will be ignored if `since` or `until` is given.
+    """
+    period: SponsorsActivityPeriod = MONTH
+
+    """
+    Filter activities to those that occurred on or after this time.
+    """
+    since: DateTime
+
+    """
+    Filter activities to those that occurred before this time.
+    """
+    until: DateTime
+  ): SponsorsActivityConnection!
+
+  """
+  The GitHub Sponsors listing for this user or organization.
+  """
+  sponsorsListing: SponsorsListing
+
+  """
+  The sponsorship from the viewer to this user/organization; that is, the sponsorship where you're the sponsor.
+  """
+  sponsorshipForViewerAsSponsor(
+    """
+    Whether to return the sponsorship only if it's still active. Pass false to
+    get the viewer's sponsorship back even if it has been cancelled.
+    """
+    activeOnly: Boolean = true
+  ): Sponsorship
+
+  """
+  The sponsorship from this user/organization to the viewer; that is, the sponsorship you're receiving.
+  """
+  sponsorshipForViewerAsSponsorable(
+    """
+    Whether to return the sponsorship only if it's still active. Pass false to
+    get the sponsorship back even if it has been cancelled.
+    """
+    activeOnly: Boolean = true
+  ): Sponsorship
+
+  """
+  List of sponsorship updates sent from this sponsorable to sponsors.
+  """
+  sponsorshipNewsletters(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for sponsorship updates returned from the connection.
+    """
+    orderBy: SponsorshipNewsletterOrder = {field: CREATED_AT, direction: DESC}
+  ): SponsorshipNewsletterConnection!
+
+  """
+  The sponsorships where this user or organization is the maintainer receiving the funds.
+  """
+  sponsorshipsAsMaintainer(
+    """
+    Whether to include only sponsorships that are active right now, versus all
+    sponsorships this maintainer has ever received.
+    """
+    activeOnly: Boolean = true
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Whether or not to include private sponsorships in the result set
+    """
+    includePrivate: Boolean = false
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for sponsorships returned from this connection. If left
+    blank, the sponsorships will be ordered based on relevancy to the viewer.
+    """
+    orderBy: SponsorshipOrder
+  ): SponsorshipConnection!
+
+  """
+  The sponsorships where this user or organization is the funder.
+  """
+  sponsorshipsAsSponsor(
+    """
+    Whether to include only sponsorships that are active right now, versus all sponsorships this sponsor has ever made.
+    """
+    activeOnly: Boolean = true
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter sponsorships returned to those for the specified maintainers. That
+    is, the recipient of the sponsorship is a user or organization with one of
+    the given logins.
+    """
+    maintainerLogins: [String!]
+
+    """
+    Ordering options for sponsorships returned from this connection. If left
+    blank, the sponsorships will be ordered based on relevancy to the viewer.
+    """
+    orderBy: SponsorshipOrder
+  ): SponsorshipConnection!
+
+  """
+  Find an organization's team by its slug.
+  """
+  team(
+    """
+    The name or slug of the team to find.
+    """
+    slug: String!
+  ): Team
+
+  """
+  A list of teams in this organization.
+  """
+  teams(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    If true, filters teams that are mapped to an LDAP Group (Enterprise only)
+    """
+    ldapMapped: Boolean
+
+    """
+    If non-null, filters teams according to notification setting
+    """
+    notificationSetting: TeamNotificationSetting
+
+    """
+    Ordering options for teams returned from the connection
+    """
+    orderBy: TeamOrder
+
+    """
+    If non-null, filters teams according to privacy
+    """
+    privacy: TeamPrivacy
+
+    """
+    If non-null, filters teams with query on team name and team slug
+    """
+    query: String
+
+    """
+    If non-null, filters teams according to whether the viewer is an admin or member on the team
+    """
+    role: TeamRole
+
+    """
+    If true, restrict to only root teams
+    """
+    rootTeamsOnly: Boolean = false
+
+    """
+    User logins to filter by
+    """
+    userLogins: [String!]
+  ): TeamConnection!
+
+  """
+  The HTTP path listing the organization's teams
+  """
+  teamsResourcePath: URI!
+
+  """
+  The HTTP URL listing the organization's teams
+  """
+  teamsUrl: URI!
+
+  """
+  The amount in United States cents (e.g., 500 = $5.00 USD) that this entity has
+  spent on GitHub to fund sponsorships. Only returns a value when viewed by the
+  user themselves or by a user who can manage sponsorships for the requested organization.
+  """
+  totalSponsorshipAmountAsSponsorInCents(
+    """
+    Filter payments to those that occurred on or after this time.
+    """
+    since: DateTime
+
+    """
+    Filter payments to those made to the users or organizations with the specified usernames.
+    """
+    sponsorableLogins: [String!] = []
+
+    """
+    Filter payments to those that occurred before this time.
+    """
+    until: DateTime
+  ): Int
+
+  """
+  The organization's Twitter username.
+  """
+  twitterUsername: String
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this organization.
+  """
+  url: URI!
+
+  """
+  Organization is adminable by the viewer.
+  """
+  viewerCanAdminister: Boolean!
+
+  """
+  Can the viewer pin repositories and gists to the profile?
+  """
+  viewerCanChangePinnedItems: Boolean!
+
+  """
+  Can the current viewer create new projects on this owner.
+  """
+  viewerCanCreateProjects: Boolean!
+
+  """
+  Viewer can create repositories on this organization
+  """
+  viewerCanCreateRepositories: Boolean!
+
+  """
+  Viewer can create teams on this organization.
+  """
+  viewerCanCreateTeams: Boolean!
+
+  """
+  Whether or not the viewer is able to sponsor this user/organization.
+  """
+  viewerCanSponsor: Boolean!
+
+  """
+  Viewer is an active member of this organization.
+  """
+  viewerIsAMember: Boolean!
+
+  """
+  Whether or not this Organization is followed by the viewer.
+  """
+  viewerIsFollowing: Boolean!
+
+  """
+  True if the viewer is sponsoring this user/organization.
+  """
+  viewerIsSponsoring: Boolean!
+
+  """
+  Whether contributors are required to sign off on web-based commits for repositories in this organization.
+  """
+  webCommitSignoffRequired: Boolean!
+
+  """
+  The organization's public profile URL.
+  """
+  websiteUrl: URI
+}
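+
+# Illustrative sketch, not upstream schema content: a minimal query against the
+# Organization type above. It assumes the top-level organization(login:) field
+# on the Query root defined elsewhere in this file; "example-org" is a
+# placeholder. Kept in a comment so the SDL stays parseable.
+#
+#   query {
+#     organization(login: "example-org") {
+#       name
+#       auditLog(first: 10) {
+#         totalCount
+#         pageInfo { hasNextPage endCursor }
+#       }
+#     }
+#   }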
+
+"""
+An audit entry in an organization audit log.
+"""
+union OrganizationAuditEntry =
+    MembersCanDeleteReposClearAuditEntry
+  | MembersCanDeleteReposDisableAuditEntry
+  | MembersCanDeleteReposEnableAuditEntry
+  | OauthApplicationCreateAuditEntry
+  | OrgAddBillingManagerAuditEntry
+  | OrgAddMemberAuditEntry
+  | OrgBlockUserAuditEntry
+  | OrgConfigDisableCollaboratorsOnlyAuditEntry
+  | OrgConfigEnableCollaboratorsOnlyAuditEntry
+  | OrgCreateAuditEntry
+  | OrgDisableOauthAppRestrictionsAuditEntry
+  | OrgDisableSamlAuditEntry
+  | OrgDisableTwoFactorRequirementAuditEntry
+  | OrgEnableOauthAppRestrictionsAuditEntry
+  | OrgEnableSamlAuditEntry
+  | OrgEnableTwoFactorRequirementAuditEntry
+  | OrgInviteMemberAuditEntry
+  | OrgInviteToBusinessAuditEntry
+  | OrgOauthAppAccessApprovedAuditEntry
+  | OrgOauthAppAccessBlockedAuditEntry
+  | OrgOauthAppAccessDeniedAuditEntry
+  | OrgOauthAppAccessRequestedAuditEntry
+  | OrgOauthAppAccessUnblockedAuditEntry
+  | OrgRemoveBillingManagerAuditEntry
+  | OrgRemoveMemberAuditEntry
+  | OrgRemoveOutsideCollaboratorAuditEntry
+  | OrgRestoreMemberAuditEntry
+  | OrgUnblockUserAuditEntry
+  | OrgUpdateDefaultRepositoryPermissionAuditEntry
+  | OrgUpdateMemberAuditEntry
+  | OrgUpdateMemberRepositoryCreationPermissionAuditEntry
+  | OrgUpdateMemberRepositoryInvitationPermissionAuditEntry
+  | PrivateRepositoryForkingDisableAuditEntry
+  | PrivateRepositoryForkingEnableAuditEntry
+  | RepoAccessAuditEntry
+  | RepoAddMemberAuditEntry
+  | RepoAddTopicAuditEntry
+  | RepoArchivedAuditEntry
+  | RepoChangeMergeSettingAuditEntry
+  | RepoConfigDisableAnonymousGitAccessAuditEntry
+  | RepoConfigDisableCollaboratorsOnlyAuditEntry
+  | RepoConfigDisableContributorsOnlyAuditEntry
+  | RepoConfigDisableSockpuppetDisallowedAuditEntry
+  | RepoConfigEnableAnonymousGitAccessAuditEntry
+  | RepoConfigEnableCollaboratorsOnlyAuditEntry
+  | RepoConfigEnableContributorsOnlyAuditEntry
+  | RepoConfigEnableSockpuppetDisallowedAuditEntry
+  | RepoConfigLockAnonymousGitAccessAuditEntry
+  | RepoConfigUnlockAnonymousGitAccessAuditEntry
+  | RepoCreateAuditEntry
+  | RepoDestroyAuditEntry
+  | RepoRemoveMemberAuditEntry
+  | RepoRemoveTopicAuditEntry
+  | RepositoryVisibilityChangeDisableAuditEntry
+  | RepositoryVisibilityChangeEnableAuditEntry
+  | TeamAddMemberAuditEntry
+  | TeamAddRepositoryAuditEntry
+  | TeamChangeParentTeamAuditEntry
+  | TeamRemoveMemberAuditEntry
+  | TeamRemoveRepositoryAuditEntry
+
+"""
+The connection type for OrganizationAuditEntry.
+"""
+type OrganizationAuditEntryConnection {
+  """
+  A list of edges.
+  """
+  edges: [OrganizationAuditEntryEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [OrganizationAuditEntry]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
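+
+# Illustrative sketch: auditLog nodes resolve to the OrganizationAuditEntry
+# union above, so clients select concrete fields through inline fragments.
+# The field names below are assumed from the audit entry types earlier in this
+# schema.
+#
+#   auditLog(first: 10) {
+#     nodes {
+#       ... on OrgAddMemberAuditEntry { organizationName userLogin }
+#       ... on OrgRemoveMemberAuditEntry { organizationName userLogin }
+#     }
+#   }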
+
+"""
+Metadata for an audit entry with action org.*
+"""
+interface OrganizationAuditEntryData {
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+}
+
+"""
+An edge in a connection.
+"""
+type OrganizationAuditEntryEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: OrganizationAuditEntry
+}
+
+"""
+A list of organizations managed by an enterprise.
+"""
+type OrganizationConnection {
+  """
+  A list of edges.
+  """
+  edges: [OrganizationEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Organization]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type OrganizationEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Organization
+}
+
+"""
+The connection type for User.
+"""
+type OrganizationEnterpriseOwnerConnection {
+  """
+  A list of edges.
+  """
+  edges: [OrganizationEnterpriseOwnerEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [User]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An enterprise owner in the context of an organization that is part of the enterprise.
+"""
+type OrganizationEnterpriseOwnerEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: User
+
+  """
+  The role of the owner with respect to the organization.
+  """
+  organizationRole: RoleInOrganization!
+}
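+
+# Illustrative sketch: this edge carries per-edge metadata (organizationRole),
+# so clients that need it select edges rather than nodes. User.login is assumed
+# from the User type defined elsewhere in this file.
+#
+#   enterpriseOwners(first: 10) {
+#     edges {
+#       organizationRole
+#       node { login }
+#     }
+#   }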
+
+"""
+An Identity Provider configured to provision SAML and SCIM identities for
+Organizations. Visible to (1) organization owners, (2) organization owners'
+personal access tokens (classic) with read:org or admin:org scope, (3) GitHub
+App with an installation token with read or write access to members.
+"""
+type OrganizationIdentityProvider implements Node {
+  """
+  The digest algorithm used to sign SAML requests for the Identity Provider.
+  """
+  digestMethod: URI
+
+  """
+  External Identities provisioned by this Identity Provider
+  """
+  externalIdentities(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter to external identities with the user's login
+    """
+    login: String
+
+    """
+    Filter to external identities with valid org membership only
+    """
+    membersOnly: Boolean
+
+    """
+    Filter to external identities with the user's userName/NameID attribute
+    """
+    userName: String
+  ): ExternalIdentityConnection!
+
+  """
+  The Node ID of the OrganizationIdentityProvider object
+  """
+  id: ID!
+
+  """
+  The x509 certificate used by the Identity Provider to sign assertions and responses.
+  """
+  idpCertificate: X509Certificate
+
+  """
+  The Issuer Entity ID for the SAML Identity Provider
+  """
+  issuer: String
+
+  """
+  Organization this Identity Provider belongs to
+  """
+  organization: Organization
+
+  """
+  The signature algorithm used to sign SAML requests for the Identity Provider.
+  """
+  signatureMethod: URI
+
+  """
+  The URL endpoint for the Identity Provider's SAML SSO.
+  """
+  ssoUrl: URI
+}
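+
+# Illustrative sketch: this identity provider hangs off
+# Organization.samlIdentityProvider (see above), subject to the visibility
+# rules in its description. totalCount on ExternalIdentityConnection is
+# assumed to follow the same connection convention as the other connection
+# types in this file.
+#
+#   samlIdentityProvider {
+#     ssoUrl
+#     issuer
+#     externalIdentities(first: 25, membersOnly: true) { totalCount }
+#   }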
+
+"""
+An Invitation for a user to an organization.
+"""
+type OrganizationInvitation implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The email address of the user invited to the organization.
+  """
+  email: String
+
+  """
+  The Node ID of the OrganizationInvitation object
+  """
+  id: ID!
+
+  """
+  The source of the invitation.
+  """
+  invitationSource: OrganizationInvitationSource!
+
+  """
+  The type of invitation that was sent (e.g. email, user).
+  """
+  invitationType: OrganizationInvitationType!
+
+  """
+  The user who was invited to the organization.
+  """
+  invitee: User
+
+  """
+  The user who created the invitation.
+  """
+  inviter: User!
+
+  """
+  The organization the invite is for
+  """
+  organization: Organization!
+
+  """
+  The user's pending role in the organization (e.g. member, owner).
+  """
+  role: OrganizationInvitationRole!
+}
+
+"""
+The connection type for OrganizationInvitation.
+"""
+type OrganizationInvitationConnection {
+  """
+  A list of edges.
+  """
+  edges: [OrganizationInvitationEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [OrganizationInvitation]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type OrganizationInvitationEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: OrganizationInvitation
+}
+
+"""
+The possible organization invitation roles.
+"""
+enum OrganizationInvitationRole {
+  """
+  The user is invited to be an admin of the organization.
+  """
+  ADMIN
+
+  """
+  The user is invited to be a billing manager of the organization.
+  """
+  BILLING_MANAGER
+
+  """
+  The user is invited to be a direct member of the organization.
+  """
+  DIRECT_MEMBER
+
+  """
+  The user's previous role will be reinstated.
+  """
+  REINSTATE
+}
+
+"""
+The possible organization invitation sources.
+"""
+enum OrganizationInvitationSource {
+  """
+  The invitation was created from the web interface or from the API
+  """
+  MEMBER
+
+  """
+  The invitation was created from SCIM
+  """
+  SCIM
+
+  """
+  The invitation was sent before this feature was added
+  """
+  UNKNOWN
+}
+
+"""
+The possible organization invitation types.
+"""
+enum OrganizationInvitationType {
+  """
+  The invitation was to an email address.
+  """
+  EMAIL
+
+  """
+  The invitation was to an existing user.
+  """
+  USER
+}
+
+"""
+The connection type for User.
+"""
+type OrganizationMemberConnection {
+  """
+  A list of edges.
+  """
+  edges: [OrganizationMemberEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [User]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+Represents a user within an organization.
+"""
+type OrganizationMemberEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  Whether the organization member has two-factor authentication enabled. Returns null if this information is not available to the viewer.
+  """
+  hasTwoFactorEnabled: Boolean
+
+  """
+  The item at the end of the edge.
+  """
+  node: User
+
+  """
+  The role this user has in the organization.
+  """
+  role: OrganizationMemberRole
+}
+
+"""
+The possible roles within an organization for its members.
+"""
+enum OrganizationMemberRole {
+  """
+  The user is an administrator of the organization.
+  """
+  ADMIN
+
+  """
+  The user is a member of the organization.
+  """
+  MEMBER
+}
+
+"""
+The possible values for the members can create repositories setting on an organization.
+"""
+enum OrganizationMembersCanCreateRepositoriesSettingValue {
+  """
+  Members will be able to create public and private repositories.
+  """
+  ALL
+
+  """
+  Members will not be able to create public or private repositories.
+  """
+  DISABLED
+
+  """
+  Members will be able to create only internal repositories.
+  """
+  INTERNAL
+
+  """
+  Members will be able to create only private repositories.
+  """
+  PRIVATE
+}
+
+"""
+A GitHub Enterprise Importer (GEI) organization migration.
+"""
+type OrganizationMigration implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: String
+
+  """
+  The reason the organization migration failed.
+  """
+  failureReason: String
+
+  """
+  The Node ID of the OrganizationMigration object
+  """
+  id: ID!
+
+  """
+  The remaining number of repositories to be migrated.
+  """
+  remainingRepositoriesCount: Int
+
+  """
+  The name of the source organization to be migrated.
+  """
+  sourceOrgName: String!
+
+  """
+  The URL of the source organization to migrate.
+  """
+  sourceOrgUrl: URI!
+
+  """
+  The migration state.
+  """
+  state: OrganizationMigrationState!
+
+  """
+  The name of the target organization.
+  """
+  targetOrgName: String!
+
+  """
+  The total number of repositories to be migrated.
+  """
+  totalRepositoriesCount: Int
+}
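+
+# Illustrative sketch: since OrganizationMigration implements Node, a client
+# can poll its progress by ID. The top-level node(id:) field is assumed from
+# the Query root defined elsewhere in this file; the ID is a placeholder.
+#
+#   query {
+#     node(id: "OM_placeholder") {
+#       ... on OrganizationMigration {
+#         state
+#         remainingRepositoriesCount
+#         totalRepositoriesCount
+#       }
+#     }
+#   }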
+
+"""
+The Octoshift Organization migration state.
+"""
+enum OrganizationMigrationState {
+  """
+  The Octoshift migration has failed.
+  """
+  FAILED
+
+  """
+  The Octoshift migration has invalid credentials.
+  """
+  FAILED_VALIDATION
+
+  """
+  The Octoshift migration is in progress.
+  """
+  IN_PROGRESS
+
+  """
+  The Octoshift migration has not started.
+  """
+  NOT_STARTED
+
+  """
+  The Octoshift migration needs to have its credentials validated.
+  """
+  PENDING_VALIDATION
+
+  """
+  The Octoshift migration is performing post-repository migrations.
+  """
+  POST_REPO_MIGRATION
+
+  """
+  The Octoshift migration is performing pre-repository migrations.
+  """
+  PRE_REPO_MIGRATION
+
+  """
+  The Octoshift migration has been queued.
+  """
+  QUEUED
+
+  """
+  The Octoshift organization migration is performing repository migrations.
+  """
+  REPO_MIGRATION
+
+  """
+  The Octoshift migration has succeeded.
+  """
+  SUCCEEDED
+}
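+
+# Inferred from the value descriptions above, not stated normatively by the
+# schema: a migration appears to progress roughly
+# NOT_STARTED -> PENDING_VALIDATION -> QUEUED -> IN_PROGRESS
+# (PRE_REPO_MIGRATION -> REPO_MIGRATION -> POST_REPO_MIGRATION) and ends in
+# SUCCEEDED, FAILED, or FAILED_VALIDATION.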
+
+"""
+Used for argument of CreateProjectV2 mutation.
+"""
+union OrganizationOrUser = Organization | User
+
+"""
+Ordering options for organization connections.
+"""
+input OrganizationOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order organizations by.
+  """
+  field: OrganizationOrderField!
+}
+
+"""
+Properties by which organization connections can be ordered.
+"""
+enum OrganizationOrderField {
+  """
+  Order organizations by creation time
+  """
+  CREATED_AT
+
+  """
+  Order organizations by login
+  """
+  LOGIN
+}
+
+"""
+An organization teams hovercard context
+"""
+type OrganizationTeamsHovercardContext implements HovercardContext {
+  """
+  A string describing this context
+  """
+  message: String!
+
+  """
+  An octicon to accompany this context
+  """
+  octicon: String!
+
+  """
+  Relevant teams in this organization that the user is a member of
+  """
+  relevantTeams(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): TeamConnection!
+
+  """
+  The path for the full team list for this user
+  """
+  teamsResourcePath: URI!
+
+  """
+  The URL for the full team list for this user
+  """
+  teamsUrl: URI!
+
+  """
+  The total number of teams the user is on in the organization
+  """
+  totalTeamCount: Int!
+}
+
+"""
+An organization list hovercard context
+"""
+type OrganizationsHovercardContext implements HovercardContext {
+  """
+  A string describing this context
+  """
+  message: String!
+
+  """
+  An octicon to accompany this context
+  """
+  octicon: String!
+
+  """
+  Relevant organizations that this user is a member of
+  """
+  relevantOrganizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the User's organizations.
+    """
+    orderBy: OrganizationOrder = null
+  ): OrganizationConnection!
+
+  """
+  The total number of organizations this user is in
+  """
+  totalOrganizationCount: Int!
+}
+
+"""
+Information for an uploaded package.
+"""
+type Package implements Node {
+  """
+  The Node ID of the Package object
+  """
+  id: ID!
+
+  """
+  Find the latest version for the package.
+  """
+  latestVersion: PackageVersion
+
+  """
+  Identifies the name of the package.
+  """
+  name: String!
+
+  """
+  Identifies the type of the package.
+  """
+  packageType: PackageType!
+
+  """
+  The repository this package belongs to.
+  """
+  repository: Repository
+
+  """
+  Statistics about package activity.
+  """
+  statistics: PackageStatistics
+
+  """
+  Find package version by version string.
+  """
+  version(
+    """
+    The package version.
+    """
+    version: String!
+  ): PackageVersion
+
+  """
+  A list of versions for this package.
+  """
+  versions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering of the returned packages.
+    """
+    orderBy: PackageVersionOrder = {field: CREATED_AT, direction: DESC}
+  ): PackageVersionConnection!
+}
+
+"""
+The connection type for Package.
+"""
+type PackageConnection {
+  """
+  A list of edges.
+  """
+  edges: [PackageEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Package]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type PackageEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Package
+}
+
+"""
+A file in a package version.
+"""
+type PackageFile implements Node {
+  """
+  The Node ID of the PackageFile object
+  """
+  id: ID!
+
+  """
+  MD5 hash of the file.
+  """
+  md5: String
+
+  """
+  Name of the file.
+  """
+  name: String!
+
+  """
+  The package version this file belongs to.
+  """
+  packageVersion: PackageVersion
+
+  """
+  SHA1 hash of the file.
+  """
+  sha1: String
+
+  """
+  SHA256 hash of the file.
+  """
+  sha256: String
+
+  """
+  Size of the file in bytes.
+  """
+  size: Int
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  URL to download the asset.
+  """
+  url: URI
+}
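+
+# Illustrative sketch: PackageFile exposes md5/sha1/sha256 alongside url, so a
+# downloader can verify an asset after fetching it. All fields below appear in
+# the type above.
+#
+#   files(first: 10) {
+#     nodes { name size sha256 url }
+#   }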
+
+"""
+The connection type for PackageFile.
+"""
+type PackageFileConnection {
+  """
+  A list of edges.
+  """
+  edges: [PackageFileEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [PackageFile]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type PackageFileEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PackageFile
+}
+
+"""
+Ways in which lists of package files can be ordered upon return.
+"""
+input PackageFileOrder {
+  """
+  The direction in which to order package files by the specified field.
+  """
+  direction: OrderDirection
+
+  """
+  The field by which to order package files.
+  """
+  field: PackageFileOrderField
+}
+
+"""
+Properties by which package file connections can be ordered.
+"""
+enum PackageFileOrderField {
+  """
+  Order package files by creation time
+  """
+  CREATED_AT
+}
+
+"""
+Ways in which lists of packages can be ordered upon return.
+"""
+input PackageOrder {
+  """
+  The direction in which to order packages by the specified field.
+  """
+  direction: OrderDirection
+
+  """
+  The field by which to order packages.
+  """
+  field: PackageOrderField
+}
+
+"""
+Properties by which package connections can be ordered.
+"""
+enum PackageOrderField {
+  """
+  Order packages by creation time
+  """
+  CREATED_AT
+}
+
+"""
+Represents an owner of a package.
+"""
+interface PackageOwner {
+  """
+  The Node ID of the PackageOwner object
+  """
+  id: ID!
+
+  """
+  A list of packages under the owner.
+  """
+  packages(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Find packages by their names.
+    """
+    names: [String]
+
+    """
+    Ordering of the returned packages.
+    """
+    orderBy: PackageOrder = {field: CREATED_AT, direction: DESC}
+
+    """
+    Filter registry package by type.
+    """
+    packageType: PackageType
+
+    """
+    Find packages in a repository by ID.
+    """
+    repositoryId: ID
+  ): PackageConnection!
+}
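+
+# Illustrative sketch: Organization above implements PackageOwner, so the same
+# packages selection works against any implementer via an inline fragment on
+# the interface:
+#
+#   ... on PackageOwner {
+#     packages(first: 5, orderBy: {field: CREATED_AT, direction: DESC}) {
+#       nodes { name packageType }
+#     }
+#   }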
+
+"""
+Represents an object that contains package activity statistics such as downloads.
+"""
+type PackageStatistics {
+  """
+  Number of times the package was downloaded since it was created.
+  """
+  downloadsTotalCount: Int!
+}
+
+"""
+A version tag contains the mapping between a tag name and a version.
+"""
+type PackageTag implements Node {
+  """
+  The Node ID of the PackageTag object
+  """
+  id: ID!
+
+  """
+  Identifies the tag name of the version.
+  """
+  name: String!
+
+  """
+  Version that the tag is associated with.
+  """
+  version: PackageVersion
+}
+
+"""
+The possible types of a package.
+"""
+enum PackageType {
+  """
+  A debian package.
+  """
+  DEBIAN
+
+  """
+  A docker image.
+  """
+  DOCKER
+    @deprecated(
+      reason: "DOCKER will be removed from this enum as this type will be migrated to only be used by the Packages REST API. Removal on 2021-06-21 UTC."
+    )
+
+  """
+  A maven package.
+  """
+  MAVEN
+    @deprecated(
+      reason: "MAVEN will be removed from this enum as this type will be migrated to only be used by the Packages REST API. Removal on 2023-02-10 UTC."
+    )
+
+  """
+  An npm package.
+  """
+  NPM
+    @deprecated(
+      reason: "NPM will be removed from this enum as this type will be migrated to only be used by the Packages REST API. Removal on 2022-11-21 UTC."
+    )
+
+  """
+  A nuget package.
+  """
+  NUGET
+    @deprecated(
+      reason: "NUGET will be removed from this enum as this type will be migrated to only be used by the Packages REST API. Removal on 2022-11-21 UTC."
+    )
+
+  """
+  A python package.
+  """
+  PYPI
+
+  """
+  A rubygems package.
+  """
+  RUBYGEMS
+    @deprecated(
+      reason: "RUBYGEMS will be removed from this enum as this type will be migrated to only be used by the Packages REST API. Removal on 2022-12-28 UTC."
+    )
+}
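+
+# Note derived from the deprecations above: only DEBIAN and PYPI remain
+# non-deprecated in this enum, so a filter such as the following is the safe
+# form for new clients:
+#
+#   packages(first: 10, packageType: PYPI) { totalCount }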
+
+"""
+Information about a specific package version.
+"""
+type PackageVersion implements Node {
+  """
+  List of files associated with this package version
+  """
+  files(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering of the returned package files.
+    """
+    orderBy: PackageFileOrder = {field: CREATED_AT, direction: ASC}
+  ): PackageFileConnection!
+
+  """
+  The Node ID of the PackageVersion object
+  """
+  id: ID!
+
+  """
+  The package associated with this version.
+  """
+  package: Package
+
+  """
+  The platform this version was built for.
+  """
+  platform: String
+
+  """
+  Whether or not this version is a pre-release.
+  """
+  preRelease: Boolean!
+
+  """
+  The README of this package version.
+  """
+  readme: String
+
+  """
+  The release associated with this package version.
+  """
+  release: Release
+
+  """
+  Statistics about package activity.
+  """
+  statistics: PackageVersionStatistics
+
+  """
+  The package version summary.
+  """
+  summary: String
+
+  """
+  The version string.
+  """
+  version: String!
+}
+
+"""
+The connection type for PackageVersion.
+"""
+type PackageVersionConnection {
+  """
+  A list of edges.
+  """
+  edges: [PackageVersionEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [PackageVersion]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type PackageVersionEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PackageVersion
+}
+
+"""
+Ways in which lists of package versions can be ordered upon return.
+"""
+input PackageVersionOrder {
+  """
+  The direction in which to order package versions by the specified field.
+  """
+  direction: OrderDirection
+
+  """
+  The field by which to order package versions.
+  """
+  field: PackageVersionOrderField
+}
+
+"""
+Properties by which package version connections can be ordered.
+"""
+enum PackageVersionOrderField {
+  """
+  Order package versions by creation time
+  """
+  CREATED_AT
+}
+
+"""
+Represents an object that contains package version activity statistics such as downloads.
+"""
+type PackageVersionStatistics {
+  """
+  Number of times the package was downloaded since it was created.
+  """
+  downloadsTotalCount: Int!
+}
+
+"""
+Information about pagination in a connection.
+"""
+type PageInfo {
+  """
+  When paginating forwards, the cursor to continue.
+  """
+  endCursor: String
+
+  """
+  When paginating forwards, are there more items?
+  """
+  hasNextPage: Boolean!
+
+  """
+  When paginating backwards, are there more items?
+  """
+  hasPreviousPage: Boolean!
+
+  """
+  When paginating backwards, the cursor to continue.
+  """
+  startCursor: String
+}
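+
+# Illustrative sketch: PageInfo drives cursor pagination for every connection
+# in this schema. A client repeats the query, passing endCursor back as the
+# connection's after argument until hasNextPage is false ($cursor starts null):
+#
+#   versions(first: 50, after: $cursor) {
+#     pageInfo { hasNextPage endCursor }
+#     nodes { version }
+#   }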
+
+"""
+The possible types of patch statuses.
+"""
+enum PatchStatus {
+  """
+  The file was added. Git status 'A'.
+  """
+  ADDED
+
+  """
+  The file's type was changed. Git status 'T'.
+  """
+  CHANGED
+
+  """
+  The file was copied. Git status 'C'.
+  """
+  COPIED
+
+  """
+  The file was deleted. Git status 'D'.
+  """
+  DELETED
+
+  """
+  The file's contents were changed. Git status 'M'.
+  """
+  MODIFIED
+
+  """
+  The file was renamed. Git status 'R'.
+  """
+  RENAMED
+}
+
+"""
+Types that can grant permissions on a repository to a user
+"""
+union PermissionGranter = Organization | Repository | Team
+
+"""
+A level of permission and source for a user's access to a repository.
+"""
+type PermissionSource {
+  """
+  The organization the repository belongs to.
+  """
+  organization: Organization!
+
+  """
+  The level of access this source has granted to the user.
+  """
+  permission: DefaultRepositoryPermissionField!
+
+  """
+  The name of the role this source has granted to the user.
+  """
+  roleName: String
+
+  """
+  The source of this permission.
+  """
+  source: PermissionGranter!
+}
+
+"""
+Autogenerated input type of PinIssue
+"""
+input PinIssueInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the issue to be pinned
+  """
+  issueId: ID! @possibleTypes(concreteTypes: ["Issue"])
+}
+
+"""
+Autogenerated return type of PinIssue
+"""
+type PinIssuePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The issue that was pinned
+  """
+  issue: Issue
+}
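+
+# Editorial sketch (not part of the upstream schema): PinIssueInput and
+# PinIssuePayload follow the standard input/payload mutation convention, so
+# the corresponding `pinIssue` field on the Mutation type (defined elsewhere
+# in this schema) is called like this; the issue ID is a placeholder:
+#
+# mutation {
+#   pinIssue(input: {issueId: "I_kwDOexample"}) {
+#     issue { title }
+#   }
+# }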
+
+"""
+Types that can be pinned to a profile page.
+"""
+union PinnableItem = Gist | Repository
+
+"""
+The connection type for PinnableItem.
+"""
+type PinnableItemConnection {
+  """
+  A list of edges.
+  """
+  edges: [PinnableItemEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [PinnableItem]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type PinnableItemEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PinnableItem
+}
+
+"""
+Represents items that can be pinned to a profile page or dashboard.
+"""
+enum PinnableItemType {
+  """
+  A gist.
+  """
+  GIST
+
+  """
+  An issue.
+  """
+  ISSUE
+
+  """
+  An organization.
+  """
+  ORGANIZATION
+
+  """
+  A project.
+  """
+  PROJECT
+
+  """
+  A pull request.
+  """
+  PULL_REQUEST
+
+  """
+  A repository.
+  """
+  REPOSITORY
+
+  """
+  A team.
+  """
+  TEAM
+
+  """
+  A user.
+  """
+  USER
+}
+
+"""
+A Pinned Discussion is a discussion pinned to a repository's index page.
+"""
+type PinnedDiscussion implements Node & RepositoryNode {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The discussion that was pinned.
+  """
+  discussion: Discussion!
+
+  """
+  Color stops of the chosen gradient
+  """
+  gradientStopColors: [String!]!
+
+  """
+  The Node ID of the PinnedDiscussion object
+  """
+  id: ID!
+
+  """
+  Background texture pattern
+  """
+  pattern: PinnedDiscussionPattern!
+
+  """
+  The actor that pinned this discussion.
+  """
+  pinnedBy: Actor!
+
+  """
+  Preconfigured background gradient option
+  """
+  preconfiguredGradient: PinnedDiscussionGradient
+
+  """
+  The repository associated with this node.
+  """
+  repository: Repository!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The connection type for PinnedDiscussion.
+"""
+type PinnedDiscussionConnection {
+  """
+  A list of edges.
+  """
+  edges: [PinnedDiscussionEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [PinnedDiscussion]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type PinnedDiscussionEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PinnedDiscussion
+}
+
+"""
+Preconfigured gradients that may be used to style discussions pinned within a repository.
+"""
+enum PinnedDiscussionGradient {
+  """
+  A gradient of blue to mint
+  """
+  BLUE_MINT
+
+  """
+  A gradient of blue to purple
+  """
+  BLUE_PURPLE
+
+  """
+  A gradient of pink to blue
+  """
+  PINK_BLUE
+
+  """
+  A gradient of purple to coral
+  """
+  PURPLE_CORAL
+
+  """
+  A gradient of red to orange
+  """
+  RED_ORANGE
+}
+
+"""
+Preconfigured background patterns that may be used to style discussions pinned within a repository.
+"""
+enum PinnedDiscussionPattern {
+  """
+  An upward-facing chevron pattern
+  """
+  CHEVRON_UP
+
+  """
+  A hollow dot pattern
+  """
+  DOT
+
+  """
+  A solid dot pattern
+  """
+  DOT_FILL
+
+  """
+  A heart pattern
+  """
+  HEART_FILL
+
+  """
+  A plus sign pattern
+  """
+  PLUS
+
+  """
+  A lightning bolt pattern
+  """
+  ZAP
+}
+
+"""
+Represents a 'pinned' event on a given issue or pull request.
+"""
+type PinnedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the PinnedEvent object
+  """
+  id: ID!
+
+  """
+  Identifies the issue associated with the event.
+  """
+  issue: Issue!
+}
+
+"""
+A Pinned Issue is an issue pinned to a repository's index page.
+"""
+type PinnedIssue implements Node {
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  Identifies the primary key from the database as a BigInt.
+  """
+  fullDatabaseId: BigInt
+
+  """
+  The Node ID of the PinnedIssue object
+  """
+  id: ID!
+
+  """
+  The issue that was pinned.
+  """
+  issue: Issue!
+
+  """
+  The actor that pinned this issue.
+  """
+  pinnedBy: Actor!
+
+  """
+  The repository that this issue was pinned to.
+  """
+  repository: Repository!
+}
+
+"""
+The connection type for PinnedIssue.
+"""
+type PinnedIssueConnection {
+  """
+  A list of edges.
+  """
+  edges: [PinnedIssueEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [PinnedIssue]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type PinnedIssueEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PinnedIssue
+}
+
+"""
+An ISO-8601 encoded UTC date string with millisecond precision.
+"""
+scalar PreciseDateTime
+
+"""
+Audit log entry for a private_repository_forking.disable event.
+"""
+type PrivateRepositoryForkingDisableAuditEntry implements AuditEntry & EnterpriseAuditEntryData & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The HTTP path for this enterprise.
+  """
+  enterpriseResourcePath: URI
+
+  """
+  The slug of the enterprise.
+  """
+  enterpriseSlug: String
+
+  """
+  The HTTP URL for this enterprise.
+  """
+  enterpriseUrl: URI
+
+  """
+  The Node ID of the PrivateRepositoryForkingDisableAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a private_repository_forking.enable event.
+"""
+type PrivateRepositoryForkingEnableAuditEntry implements AuditEntry & EnterpriseAuditEntryData & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The HTTP path for this enterprise.
+  """
+  enterpriseResourcePath: URI
+
+  """
+  The slug of the enterprise.
+  """
+  enterpriseSlug: String
+
+  """
+  The HTTP URL for this enterprise.
+  """
+  enterpriseUrl: URI
+
+  """
+  The Node ID of the PrivateRepositoryForkingEnableAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
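+
+# Editorial sketch (not part of the upstream schema): audit entry types such
+# as the two above are surfaced through an organization's audit log
+# connection; concrete entry types are selected with inline fragments.
+# Assumes the `organization` query root and its `auditLog` connection, both
+# defined elsewhere in this schema.
+#
+# query {
+#   organization(login: "acme") {
+#     auditLog(first: 10, query: "action:private_repository_forking") {
+#       nodes {
+#         ... on PrivateRepositoryForkingDisableAuditEntry {
+#           actorLogin
+#           repositoryName
+#           createdAt
+#         }
+#       }
+#     }
+#   }
+# }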
+
+"""
+A curatable list of repositories relating to a repository owner, which defaults
+to showing the most popular repositories they own.
+"""
+type ProfileItemShowcase {
+  """
+  Whether or not the owner has pinned any repositories or gists.
+  """
+  hasPinnedItems: Boolean!
+
+  """
+  The repositories and gists in the showcase. If the profile owner has any
+  pinned items, those will be returned. Otherwise, the profile owner's popular
+  repositories will be returned.
+  """
+  items(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): PinnableItemConnection!
+}
+
+"""
+Represents any entity on GitHub that has a profile page.
+"""
+interface ProfileOwner {
+  """
+  Determine if this repository owner has any items that can be pinned to their profile.
+  """
+  anyPinnableItems(
+    """
+    Filter to only a particular kind of pinnable item.
+    """
+    type: PinnableItemType
+  ): Boolean!
+
+  """
+  The public profile email.
+  """
+  email: String
+
+  """
+  The Node ID of the ProfileOwner object
+  """
+  id: ID!
+
+  """
+  Showcases a selection of repositories and gists that the profile owner has
+  either curated or that have been selected automatically based on popularity.
+  """
+  itemShowcase: ProfileItemShowcase!
+
+  """
+  The public profile location.
+  """
+  location: String
+
+  """
+  The username used to log in.
+  """
+  login: String!
+
+  """
+  The public profile name.
+  """
+  name: String
+
+  """
+  A list of repositories and gists this profile owner can pin to their profile.
+  """
+  pinnableItems(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter the types of pinnable items that are returned.
+    """
+    types: [PinnableItemType!]
+  ): PinnableItemConnection!
+
+  """
+  A list of repositories and gists this profile owner has pinned to their profile
+  """
+  pinnedItems(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter the types of pinned items that are returned.
+    """
+    types: [PinnableItemType!]
+  ): PinnableItemConnection!
+
+  """
+  Returns how many more items this profile owner can pin to their profile.
+  """
+  pinnedItemsRemaining: Int!
+
+  """
+  Can the viewer pin repositories and gists to the profile?
+  """
+  viewerCanChangePinnedItems: Boolean!
+
+  """
+  The public profile website URL.
+  """
+  websiteUrl: URI
+}
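+
+# Editorial sketch (not part of the upstream schema): PinnableItem is a union
+# (Gist | Repository), so resolving a ProfileOwner's `pinnedItems` needs one
+# inline fragment per member type. Assumes the `user` query root defined
+# elsewhere in this schema.
+#
+# query {
+#   user(login: "octocat") {
+#     pinnedItems(first: 6, types: [REPOSITORY, GIST]) {
+#       nodes {
+#         ... on Repository { nameWithOwner }
+#         ... on Gist { description }
+#       }
+#     }
+#   }
+# }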
+
+"""
+Projects manage issues, pull requests and notes within a project owner.
+"""
+type Project implements Closable & Node & Updatable {
+  """
+  The project's description body.
+  """
+  body: String
+
+  """
+  The project's description body rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  Indicates if the object is closed (definition of closed may depend on type)
+  """
+  closed: Boolean!
+
+  """
+  Identifies the date and time when the object was closed.
+  """
+  closedAt: DateTime
+
+  """
+  List of columns in the project
+  """
+  columns(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectColumnConnection!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The actor who originally created the project.
+  """
+  creator: Actor
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the Project object
+  """
+  id: ID!
+
+  """
+  The project's name.
+  """
+  name: String!
+
+  """
+  The project's number.
+  """
+  number: Int!
+
+  """
+  The project's owner. Currently limited to repositories, organizations, and users.
+  """
+  owner: ProjectOwner!
+
+  """
+  List of pending cards in this project
+  """
+  pendingCards(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    A list of archived states to filter the cards by
+    """
+    archivedStates: [ProjectCardArchivedState] = [ARCHIVED, NOT_ARCHIVED]
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectCardConnection!
+
+  """
+  Project progress details.
+  """
+  progress: ProjectProgress!
+
+  """
+  The HTTP path for this project
+  """
+  resourcePath: URI!
+
+  """
+  Whether the project is open or closed.
+  """
+  state: ProjectState!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this project
+  """
+  url: URI!
+
+  """
+  Indicates if the object can be closed by the viewer.
+  """
+  viewerCanClose: Boolean!
+
+  """
+  Indicates if the object can be reopened by the viewer.
+  """
+  viewerCanReopen: Boolean!
+
+  """
+  Check if the current viewer can update this object.
+  """
+  viewerCanUpdate: Boolean!
+}
+
+"""
+A card in a project.
+"""
+type ProjectCard implements Node {
+  """
+  The project column this card is associated under. A card may only belong to one
+  project column at a time. The column field will be null if the card is created
+  in a pending state and has yet to be associated with a column. Once cards are
+  associated with a column, they will not become pending in the future.
+  """
+  column: ProjectColumn
+
+  """
+  The card content item
+  """
+  content: ProjectCardItem
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The actor who created this card
+  """
+  creator: Actor
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the ProjectCard object
+  """
+  id: ID!
+
+  """
+  Whether the card is archived
+  """
+  isArchived: Boolean!
+
+  """
+  The card note
+  """
+  note: String
+
+  """
+  The project that contains this card.
+  """
+  project: Project!
+
+  """
+  The HTTP path for this card
+  """
+  resourcePath: URI!
+
+  """
+  The state of ProjectCard
+  """
+  state: ProjectCardState
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this card
+  """
+  url: URI!
+}
+
+"""
+The possible archived states of a project card.
+"""
+enum ProjectCardArchivedState {
+  """
+  A project card that is archived
+  """
+  ARCHIVED
+
+  """
+  A project card that is not archived
+  """
+  NOT_ARCHIVED
+}
+
+"""
+The connection type for ProjectCard.
+"""
+type ProjectCardConnection {
+  """
+  A list of edges.
+  """
+  edges: [ProjectCardEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ProjectCard]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ProjectCardEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ProjectCard
+}
+
+"""
+An issue or PR and its owning repository to be used in a project card.
+"""
+input ProjectCardImport {
+  """
+  The issue or pull request number.
+  """
+  number: Int!
+
+  """
+  Repository name with owner (owner/repository).
+  """
+  repository: String!
+}
+
+"""
+Types that can be inside Project Cards.
+"""
+union ProjectCardItem = Issue | PullRequest
+
+"""
+Various content states of a ProjectCard
+"""
+enum ProjectCardState {
+  """
+  The card has content only.
+  """
+  CONTENT_ONLY
+
+  """
+  The card has a note only.
+  """
+  NOTE_ONLY
+
+  """
+  The card is redacted.
+  """
+  REDACTED
+}
+
+"""
+A column inside a project.
+"""
+type ProjectColumn implements Node {
+  """
+  List of cards in the column
+  """
+  cards(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    A list of archived states to filter the cards by
+    """
+    archivedStates: [ProjectCardArchivedState] = [ARCHIVED, NOT_ARCHIVED]
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectCardConnection!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the ProjectColumn object
+  """
+  id: ID!
+
+  """
+  The project column's name.
+  """
+  name: String!
+
+  """
+  The project that contains this column.
+  """
+  project: Project!
+
+  """
+  The semantic purpose of the column
+  """
+  purpose: ProjectColumnPurpose
+
+  """
+  The HTTP path for this project column
+  """
+  resourcePath: URI!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this project column
+  """
+  url: URI!
+}
+
+"""
+The connection type for ProjectColumn.
+"""
+type ProjectColumnConnection {
+  """
+  A list of edges.
+  """
+  edges: [ProjectColumnEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ProjectColumn]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ProjectColumnEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ProjectColumn
+}
+
+"""
+A project column and a list of its issues and PRs.
+"""
+input ProjectColumnImport {
+  """
+  The name of the column.
+  """
+  columnName: String!
+
+  """
+  A list of issues and pull requests in the column.
+  """
+  issues: [ProjectCardImport!]
+
+  """
+  The position of the column, starting from 0.
+  """
+  position: Int!
+}
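+
+# Editorial sketch (not part of the upstream schema): ProjectColumnImport
+# nests ProjectCardImport, so an import argument built from these two inputs
+# is a nested literal shaped like the following (the mutation that consumes
+# it is defined elsewhere in this schema; names and numbers are placeholders):
+#
+# columnImports: [
+#   {
+#     columnName: "To do"
+#     position: 0
+#     issues: [{repository: "octocat/hello-world", number: 42}]
+#   }
+# ]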
+
+"""
+The semantic purpose of the column - todo, in progress, or done.
+"""
+enum ProjectColumnPurpose {
+  """
+  The column contains cards which are complete
+  """
+  DONE
+
+  """
+  The column contains cards which are currently being worked on
+  """
+  IN_PROGRESS
+
+  """
+  The column contains cards still to be worked on
+  """
+  TODO
+}
+
+"""
+A list of projects associated with the owner.
+"""
+type ProjectConnection {
+  """
+  A list of edges.
+  """
+  edges: [ProjectEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Project]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ProjectEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Project
+}
+
+"""
+Ways in which lists of projects can be ordered upon return.
+"""
+input ProjectOrder {
+  """
+  The direction in which to order projects by the specified field.
+  """
+  direction: OrderDirection!
+
+  """
+  The field by which to order projects.
+  """
+  field: ProjectOrderField!
+}
+
+"""
+Properties by which project connections can be ordered.
+"""
+enum ProjectOrderField {
+  """
+  Order projects by creation time
+  """
+  CREATED_AT
+
+  """
+  Order projects by name
+  """
+  NAME
+
+  """
+  Order projects by update time
+  """
+  UPDATED_AT
+}
+
+"""
+Represents an owner of a Project.
+"""
+interface ProjectOwner {
+  """
+  The Node ID of the ProjectOwner object
+  """
+  id: ID!
+
+  """
+  Find project by number.
+  """
+  project(
+    """
+    The project number to find.
+    """
+    number: Int!
+  ): Project
+
+  """
+  A list of projects under the owner.
+  """
+  projects(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for projects returned from the connection
+    """
+    orderBy: ProjectOrder
+
+    """
+    Query to search projects by, currently only searching by name.
+    """
+    search: String
+
+    """
+    A list of states to filter the projects by.
+    """
+    states: [ProjectState!]
+  ): ProjectConnection!
+
+  """
+  The HTTP path listing the owner's projects
+  """
+  projectsResourcePath: URI!
+
+  """
+  The HTTP URL listing the owner's projects
+  """
+  projectsUrl: URI!
+
+  """
+  Can the current viewer create new projects on this owner?
+  """
+  viewerCanCreateProjects: Boolean!
+}
+
+"""
+Project progress stats.
+"""
+type ProjectProgress {
+  """
+  The number of done cards.
+  """
+  doneCount: Int!
+
+  """
+  The percentage of done cards.
+  """
+  donePercentage: Float!
+
+  """
+  Whether progress tracking is enabled and cards with purpose exist for this project
+  """
+  enabled: Boolean!
+
+  """
+  The number of in-progress cards.
+  """
+  inProgressCount: Int!
+
+  """
+  The percentage of in-progress cards.
+  """
+  inProgressPercentage: Float!
+
+  """
+  The number of to do cards.
+  """
+  todoCount: Int!
+
+  """
+  The percentage of to do cards.
+  """
+  todoPercentage: Float!
+}
+
+"""
+State of the project; either 'open' or 'closed'
+"""
+enum ProjectState {
+  """
+  The project is closed.
+  """
+  CLOSED
+
+  """
+  The project is open.
+  """
+  OPEN
+}
+
+"""
+GitHub-provided templates for Projects
+"""
+enum ProjectTemplate {
+  """
+  Create a board with v2 triggers to automatically move cards across To do, In progress and Done columns.
+  """
+  AUTOMATED_KANBAN_V2
+
+  """
+  Create a board with triggers to automatically move cards across columns with review automation.
+  """
+  AUTOMATED_REVIEWS_KANBAN
+
+  """
+  Create a board with columns for To do, In progress and Done.
+  """
+  BASIC_KANBAN
+
+  """
+  Create a board to triage and prioritize bugs with To do, priority, and Done columns.
+  """
+  BUG_TRIAGE
+}
+
+"""
+New projects that manage issues, pull requests and drafts using tables and boards.
+"""
+type ProjectV2 implements Closable & Node & Updatable {
+  """
+  Returns true if the project is closed.
+  """
+  closed: Boolean!
+
+  """
+  Identifies the date and time when the object was closed.
+  """
+  closedAt: DateTime
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The actor who originally created the project.
+  """
+  creator: Actor
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  A field of the project
+  """
+  field(
+    """
+    The name of the field
+    """
+    name: String!
+  ): ProjectV2FieldConfiguration
+
+  """
+  List of fields and their constraints in the project
+  """
+  fields(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for project v2 fields returned from the connection
+    """
+    orderBy: ProjectV2FieldOrder = {field: POSITION, direction: ASC}
+  ): ProjectV2FieldConfigurationConnection!
+
+  """
+  The Node ID of the ProjectV2 object
+  """
+  id: ID!
+
+  """
+  List of items in the project
+  """
+  items(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for project v2 items returned from the connection
+    """
+    orderBy: ProjectV2ItemOrder = {field: POSITION, direction: ASC}
+  ): ProjectV2ItemConnection!
+
+  """
+  The project's number.
+  """
+  number: Int!
+
+  """
+  The project's owner. Currently limited to organizations and users.
+  """
+  owner: ProjectV2Owner!
+
+  """
+  Returns true if the project is public.
+  """
+  public: Boolean!
+
+  """
+  The project's readme.
+  """
+  readme: String
+
+  """
+  The repositories the project is linked to.
+  """
+  repositories(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for repositories returned from the connection
+    """
+    orderBy: RepositoryOrder = {field: CREATED_AT, direction: DESC}
+  ): RepositoryConnection!
+
+  """
+  The HTTP path for this project
+  """
+  resourcePath: URI!
+
+  """
+  The project's short description.
+  """
+  shortDescription: String
+
+  """
+  The teams the project is linked to.
+  """
+  teams(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for teams returned from this connection.
+    """
+    orderBy: TeamOrder = {field: NAME, direction: ASC}
+  ): TeamConnection!
+
+  """
+  Returns true if this project is a template.
+  """
+  template: Boolean!
+
+  """
+  The project's name.
+  """
+  title: String!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this project
+  """
+  url: URI!
+
+  """
+  A view of the project
+  """
+  view(
+    """
+    The number of a view belonging to the project
+    """
+    number: Int!
+  ): ProjectV2View
+
+  """
+  Indicates if the object can be closed by the viewer.
+  """
+  viewerCanClose: Boolean!
+
+  """
+  Indicates if the object can be reopened by the viewer.
+  """
+  viewerCanReopen: Boolean!
+
+  """
+  Check if the current viewer can update this object.
+  """
+  viewerCanUpdate: Boolean!
+
+  """
+  List of views in the project
+  """
+  views(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for project v2 views returned from the connection
+    """
+    orderBy: ProjectV2ViewOrder = {field: POSITION, direction: ASC}
+  ): ProjectV2ViewConnection!
+
+  """
+  A workflow of the project
+  """
+  workflow(
+    """
+    The number of a workflow belonging to the project
+    """
+    number: Int!
+  ): ProjectV2Workflow
+
+  """
+  List of the workflows in the project
+  """
+  workflows(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for project v2 workflows returned from the connection
+    """
+    orderBy: ProjectV2WorkflowOrder = {field: NAME, direction: ASC}
+  ): ProjectV2WorkflowConnection!
+}
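+
+# Editorial sketch (not part of the upstream schema): reading a ProjectV2's
+# field list. `fields` yields the ProjectV2FieldConfiguration union (defined
+# below), but every member implements ProjectV2FieldCommon, so one interface
+# fragment covers the shared fields. Assumes the `organization` query root
+# defined elsewhere in this schema.
+#
+# query {
+#   organization(login: "acme") {
+#     projectV2(number: 1) {
+#       title
+#       fields(first: 20) {
+#         nodes {
+#           ... on ProjectV2FieldCommon { name dataType }
+#         }
+#       }
+#     }
+#   }
+# }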
+
+"""
+Possible collaborators for a project.
+"""
+union ProjectV2Actor = Team | User
+
+"""
+The connection type for ProjectV2Actor.
+"""
+type ProjectV2ActorConnection {
+  """
+  A list of edges.
+  """
+  edges: [ProjectV2ActorEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ProjectV2Actor]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ProjectV2ActorEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ProjectV2Actor
+}
+
+"""
+A collaborator to update on a project. Only one of the userId or teamId should be provided.
+"""
+input ProjectV2Collaborator {
+  """
+  The role to grant the collaborator
+  """
+  role: ProjectV2Roles!
+
+  """
+  The ID of the team as a collaborator.
+  """
+  teamId: ID @possibleTypes(concreteTypes: ["Team"])
+
+  """
+  The ID of the user as a collaborator.
+  """
+  userId: ID @possibleTypes(concreteTypes: ["User"])
+}
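+
+# Editorial sketch (not part of the upstream schema): userId and teamId are
+# mutually exclusive in ProjectV2Collaborator, so each entry sets exactly one
+# of them. IDs are placeholders; the mutation that consumes this input is
+# defined elsewhere in this schema.
+#
+# collaborators: [
+#   {userId: "U_kgDOexample", role: WRITER}
+#   {teamId: "T_kwDOexample", role: READER}
+# ]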
+
+"""
+The connection type for ProjectV2.
+"""
+type ProjectV2Connection {
+  """
+  A list of edges.
+  """
+  edges: [ProjectV2Edge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ProjectV2]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+The type of a project field.
+"""
+enum ProjectV2CustomFieldType {
+  """
+  Date
+  """
+  DATE
+
+  """
+  Number
+  """
+  NUMBER
+
+  """
+  Single Select
+  """
+  SINGLE_SELECT
+
+  """
+  Text
+  """
+  TEXT
+}
+
+"""
+An edge in a connection.
+"""
+type ProjectV2Edge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ProjectV2
+}
+
+"""
+A field inside a project.
+"""
+type ProjectV2Field implements Node & ProjectV2FieldCommon {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The field's type.
+  """
+  dataType: ProjectV2FieldType!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the ProjectV2Field object
+  """
+  id: ID!
+
+  """
+  The project field's name.
+  """
+  name: String!
+
+  """
+  The project that contains this field.
+  """
+  project: ProjectV2!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+Common fields across different project field types
+"""
+interface ProjectV2FieldCommon {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The field's type.
+  """
+  dataType: ProjectV2FieldType!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the ProjectV2FieldCommon object
+  """
+  id: ID!
+
+  """
+  The project field's name.
+  """
+  name: String!
+
+  """
+  The project that contains this field.
+  """
+  project: ProjectV2!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+Configurations for project fields.
+"""
+union ProjectV2FieldConfiguration = ProjectV2Field | ProjectV2IterationField | ProjectV2SingleSelectField
+
+"""
+The connection type for ProjectV2FieldConfiguration.
+"""
+type ProjectV2FieldConfigurationConnection {
+  """
+  A list of edges.
+  """
+  edges: [ProjectV2FieldConfigurationEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ProjectV2FieldConfiguration]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ProjectV2FieldConfigurationEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ProjectV2FieldConfiguration
+}
+
+"""
+The connection type for ProjectV2Field.
+"""
+type ProjectV2FieldConnection {
+  """
+  A list of edges.
+  """
+  edges: [ProjectV2FieldEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ProjectV2Field]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ProjectV2FieldEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ProjectV2Field
+}
+
+"""
+Ordering options for project v2 field connections
+"""
+input ProjectV2FieldOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order the project v2 fields by.
+  """
+  field: ProjectV2FieldOrderField!
+}
+
+"""
+Properties by which project v2 field connections can be ordered.
+"""
+enum ProjectV2FieldOrderField {
+  """
+  Order project v2 fields by creation time
+  """
+  CREATED_AT
+
+  """
+  Order project v2 fields by name
+  """
+  NAME
+
+  """
+  Order project v2 fields by position
+  """
+  POSITION
+}
+
+"""
+The type of a project field.
+"""
+enum ProjectV2FieldType {
+  """
+  Assignees
+  """
+  ASSIGNEES
+
+  """
+  Date
+  """
+  DATE
+
+  """
+  Iteration
+  """
+  ITERATION
+
+  """
+  Labels
+  """
+  LABELS
+
+  """
+  Linked Pull Requests
+  """
+  LINKED_PULL_REQUESTS
+
+  """
+  Milestone
+  """
+  MILESTONE
+
+  """
+  Number
+  """
+  NUMBER
+
+  """
+  Repository
+  """
+  REPOSITORY
+
+  """
+  Reviewers
+  """
+  REVIEWERS
+
+  """
+  Single Select
+  """
+  SINGLE_SELECT
+
+  """
+  Text
+  """
+  TEXT
+
+  """
+  Title
+  """
+  TITLE
+
+  """
+  Tracked by
+  """
+  TRACKED_BY
+
+  """
+  Tracks
+  """
+  TRACKS
+}
+
+"""
+The values that can be used to update a field of an item inside a Project. Only one value can be updated at a time.
+"""
+input ProjectV2FieldValue {
+  """
+  The ISO 8601 date to set on the field.
+  """
+  date: Date
+
+  """
+  The id of the iteration to set on the field.
+  """
+  iterationId: String
+
+  """
+  The number to set on the field.
+  """
+  number: Float
+
+  """
+  The id of the single select option to set on the field.
+  """
+  singleSelectOptionId: String
+
+  """
+  The text to set on the field.
+  """
+  text: String
+}
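+
+# Editorial sketch (not part of the upstream schema): ProjectV2FieldValue is a
+# one-value-at-a-time input, so each update sets exactly one of its keys. A
+# minimal call to the `updateProjectV2ItemFieldValue` mutation defined
+# elsewhere in this schema (all IDs are placeholders):
+#
+# mutation {
+#   updateProjectV2ItemFieldValue(input: {
+#     projectId: "PVT_example"
+#     itemId: "PVTI_example"
+#     fieldId: "PVTF_example"
+#     value: {singleSelectOptionId: "opt_1"}
+#   }) {
+#     projectV2Item { id }
+#   }
+# }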
+
+"""
+Ways in which to filter lists of projects.
+"""
+input ProjectV2Filters {
+  """
+  List projects filtered by the given state.
+  """
+  state: ProjectV2State
+}
+
+"""
+An item within a Project.
+"""
+type ProjectV2Item implements Node {
+  """
+  The content of the referenced draft issue, issue, or pull request
+  """
+  content: ProjectV2ItemContent
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The actor who created the item.
+  """
+  creator: Actor
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The field value of the first project field that matches the 'name' argument and is set on the item.
+  """
+  fieldValueByName(
+    """
+    The name of the field to return the field value of
+    """
+    name: String!
+  ): ProjectV2ItemFieldValue
+
+  """
+  The field values that are set on the item.
+  """
+  fieldValues(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for project v2 item field values returned from the connection
+    """
+    orderBy: ProjectV2ItemFieldValueOrder = {field: POSITION, direction: ASC}
+  ): ProjectV2ItemFieldValueConnection!
+
+  """
+  The Node ID of the ProjectV2Item object
+  """
+  id: ID!
+
+  """
+  Whether the item is archived.
+  """
+  isArchived: Boolean!
+
+  """
+  The project that contains this item.
+  """
+  project: ProjectV2!
+
+  """
+  The type of the item.
+  """
+  type: ProjectV2ItemType!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The connection type for ProjectV2Item.
+"""
+type ProjectV2ItemConnection {
+  """
+  A list of edges.
+  """
+  edges: [ProjectV2ItemEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ProjectV2Item]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+Types that can be inside Project Items.
+"""
+union ProjectV2ItemContent = DraftIssue | Issue | PullRequest
+
+"""
+An edge in a connection.
+"""
+type ProjectV2ItemEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ProjectV2Item
+}
+
+"""
+The value of a date field in a Project item.
+"""
+type ProjectV2ItemFieldDateValue implements Node & ProjectV2ItemFieldValueCommon {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The actor who created the item.
+  """
+  creator: Actor
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  Date value for the field
+  """
+  date: Date
+
+  """
+  The project field that contains this value.
+  """
+  field: ProjectV2FieldConfiguration!
+
+  """
+  The Node ID of the ProjectV2ItemFieldDateValue object
+  """
+  id: ID!
+
+  """
+  The project item that contains this value.
+  """
+  item: ProjectV2Item!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The value of an iteration field in a Project item.
+"""
+type ProjectV2ItemFieldIterationValue implements Node & ProjectV2ItemFieldValueCommon {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The actor who created the item.
+  """
+  creator: Actor
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The duration of the iteration in days.
+  """
+  duration: Int!
+
+  """
+  The project field that contains this value.
+  """
+  field: ProjectV2FieldConfiguration!
+
+  """
+  The Node ID of the ProjectV2ItemFieldIterationValue object
+  """
+  id: ID!
+
+  """
+  The project item that contains this value.
+  """
+  item: ProjectV2Item!
+
+  """
+  The ID of the iteration.
+  """
+  iterationId: String!
+
+  """
+  The start date of the iteration.
+  """
+  startDate: Date!
+
+  """
+  The title of the iteration.
+  """
+  title: String!
+
+  """
+  The title of the iteration, with HTML.
+  """
+  titleHTML: String!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The value of the labels field in a Project item.
+"""
+type ProjectV2ItemFieldLabelValue {
+  """
+  The field that contains this value.
+  """
+  field: ProjectV2FieldConfiguration!
+
+  """
+  Labels value of a field
+  """
+  labels(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): LabelConnection
+}
+
+"""
+The value of a milestone field in a Project item.
+"""
+type ProjectV2ItemFieldMilestoneValue {
+  """
+  The field that contains this value.
+  """
+  field: ProjectV2FieldConfiguration!
+
+  """
+  Milestone value of a field
+  """
+  milestone: Milestone
+}
+
+"""
+The value of a number field in a Project item.
+"""
+type ProjectV2ItemFieldNumberValue implements Node & ProjectV2ItemFieldValueCommon {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The actor who created the item.
+  """
+  creator: Actor
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The project field that contains this value.
+  """
+  field: ProjectV2FieldConfiguration!
+
+  """
+  The Node ID of the ProjectV2ItemFieldNumberValue object
+  """
+  id: ID!
+
+  """
+  The project item that contains this value.
+  """
+  item: ProjectV2Item!
+
+  """
+  Number as a float(8)
+  """
+  number: Float
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The value of a pull request field in a Project item.
+"""
+type ProjectV2ItemFieldPullRequestValue {
+  """
+  The field that contains this value.
+  """
+  field: ProjectV2FieldConfiguration!
+
+  """
+  The pull requests for this field
+  """
+  pullRequests(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for pull requests.
+    """
+    orderBy: PullRequestOrder = {field: CREATED_AT, direction: ASC}
+  ): PullRequestConnection
+}
+
+"""
+The value of a repository field in a Project item.
+"""
+type ProjectV2ItemFieldRepositoryValue {
+  """
+  The field that contains this value.
+  """
+  field: ProjectV2FieldConfiguration!
+
+  """
+  The repository for this field.
+  """
+  repository: Repository
+}
+
+"""
+The value of a reviewers field in a Project item.
+"""
+type ProjectV2ItemFieldReviewerValue {
+  """
+  The field that contains this value.
+  """
+  field: ProjectV2FieldConfiguration!
+
+  """
+  The reviewers for this field.
+  """
+  reviewers(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): RequestedReviewerConnection
+}
+
+"""
+The value of a single select field in a Project item.
+"""
+type ProjectV2ItemFieldSingleSelectValue implements Node & ProjectV2ItemFieldValueCommon {
+  """
+  The color applied to the selected single-select option.
+  """
+  color: ProjectV2SingleSelectFieldOptionColor!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The actor who created the item.
+  """
+  creator: Actor
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  A plain-text description of the selected single-select option, such as what the option means.
+  """
+  description: String
+
+  """
+  The description of the selected single-select option, including HTML tags.
+  """
+  descriptionHTML: String
+
+  """
+  The project field that contains this value.
+  """
+  field: ProjectV2FieldConfiguration!
+
+  """
+  The Node ID of the ProjectV2ItemFieldSingleSelectValue object
+  """
+  id: ID!
+
+  """
+  The project item that contains this value.
+  """
+  item: ProjectV2Item!
+
+  """
+  The name of the selected single-select option.
+  """
+  name: String
+
+  """
+  The HTML name of the selected single-select option.
+  """
+  nameHTML: String
+
+  """
+  The id of the selected single-select option.
+  """
+  optionId: String
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The value of a text field in a Project item.
+"""
+type ProjectV2ItemFieldTextValue implements Node & ProjectV2ItemFieldValueCommon {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The actor who created the item.
+  """
+  creator: Actor
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The project field that contains this value.
+  """
+  field: ProjectV2FieldConfiguration!
+
+  """
+  The Node ID of the ProjectV2ItemFieldTextValue object
+  """
+  id: ID!
+
+  """
+  The project item that contains this value.
+  """
+  item: ProjectV2Item!
+
+  """
+  Text value of a field
+  """
+  text: String
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The value of a user field in a Project item.
+"""
+type ProjectV2ItemFieldUserValue {
+  """
+  The field that contains this value.
+  """
+  field: ProjectV2FieldConfiguration!
+
+  """
+  The users for this field
+  """
+  users(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserConnection
+}
+
+"""
+Project field values
+"""
+union ProjectV2ItemFieldValue =
+    ProjectV2ItemFieldDateValue
+  | ProjectV2ItemFieldIterationValue
+  | ProjectV2ItemFieldLabelValue
+  | ProjectV2ItemFieldMilestoneValue
+  | ProjectV2ItemFieldNumberValue
+  | ProjectV2ItemFieldPullRequestValue
+  | ProjectV2ItemFieldRepositoryValue
+  | ProjectV2ItemFieldReviewerValue
+  | ProjectV2ItemFieldSingleSelectValue
+  | ProjectV2ItemFieldTextValue
+  | ProjectV2ItemFieldUserValue
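+
+# Editorial sketch (not part of the upstream schema): reading an item's field
+# values means dispatching on this union with inline fragments; members that
+# implement ProjectV2ItemFieldValueCommon (defined below) can also share an
+# interface fragment. A selection on a ProjectV2Item already in hand:
+#
+# fieldValues(first: 20) {
+#   nodes {
+#     ... on ProjectV2ItemFieldTextValue { text }
+#     ... on ProjectV2ItemFieldDateValue { date }
+#     ... on ProjectV2ItemFieldSingleSelectValue { name }
+#     ... on ProjectV2ItemFieldLabelValue {
+#       labels(first: 10) { nodes { name } }
+#     }
+#   }
+# }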
+
+"""
+Common fields across different project field value types
+"""
+interface ProjectV2ItemFieldValueCommon {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The actor who created the item.
+  """
+  creator: Actor
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The project field that contains this value.
+  """
+  field: ProjectV2FieldConfiguration!
+
+  """
+  The Node ID of the ProjectV2ItemFieldValueCommon object
+  """
+  id: ID!
+
+  """
+  The project item that contains this value.
+  """
+  item: ProjectV2Item!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The connection type for ProjectV2ItemFieldValue.
+"""
+type ProjectV2ItemFieldValueConnection {
+  """
+  A list of edges.
+  """
+  edges: [ProjectV2ItemFieldValueEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ProjectV2ItemFieldValue]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ProjectV2ItemFieldValueEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ProjectV2ItemFieldValue
+}
+
+"""
+Ordering options for project v2 item field value connections
+"""
+input ProjectV2ItemFieldValueOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order the project v2 item field values by.
+  """
+  field: ProjectV2ItemFieldValueOrderField!
+}
+
+"""
+Properties by which project v2 item field value connections can be ordered.
+"""
+enum ProjectV2ItemFieldValueOrderField {
+  """
+  Order project v2 item field values by their position in the project
+  """
+  POSITION
+}
+
+"""
+Ordering options for project v2 item connections
+"""
+input ProjectV2ItemOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order the project v2 items by.
+  """
+  field: ProjectV2ItemOrderField!
+}
+
+"""
+Properties by which project v2 item connections can be ordered.
+"""
+enum ProjectV2ItemOrderField {
+  """
+  Order project v2 items by their position in the project
+  """
+  POSITION
+}
+
+"""
+The type of a project item.
+"""
+enum ProjectV2ItemType {
+  """
+  Draft Issue
+  """
+  DRAFT_ISSUE
+
+  """
+  Issue
+  """
+  ISSUE
+
+  """
+  Pull Request
+  """
+  PULL_REQUEST
+
+  """
+  Redacted Item
+  """
+  REDACTED
+}
+
+"""
+An iteration field inside a project.
+"""
+type ProjectV2IterationField implements Node & ProjectV2FieldCommon {
+  """
+  Iteration configuration settings
+  """
+  configuration: ProjectV2IterationFieldConfiguration!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The field's type.
+  """
+  dataType: ProjectV2FieldType!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the ProjectV2IterationField object
+  """
+  id: ID!
+
+  """
+  The project field's name.
+  """
+  name: String!
+
+  """
+  The project that contains this field.
+  """
+  project: ProjectV2!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+Iteration field configuration for a project.
+"""
+type ProjectV2IterationFieldConfiguration {
+  """
+  The field's completed iterations
+  """
+  completedIterations: [ProjectV2IterationFieldIteration!]!
+
+  """
+  The iteration's duration in days
+  """
+  duration: Int!
+
+  """
+  The field's iterations
+  """
+  iterations: [ProjectV2IterationFieldIteration!]!
+
+  """
+  The iteration's start day of the week
+  """
+  startDay: Int!
+}
+
+"""
+Iteration field iteration settings for a project.
+"""
+type ProjectV2IterationFieldIteration {
+  """
+  The iteration's duration in days
+  """
+  duration: Int!
+
+  """
+  The iteration's ID.
+  """
+  id: String!
+
+  """
+  The iteration's start date
+  """
+  startDate: Date!
+
+  """
+  The iteration's title.
+  """
+  title: String!
+
+  """
+  The iteration's HTML title.
+  """
+  titleHTML: String!
+}
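+
+# Editorial sketch (not part of the upstream schema): reading an iteration
+# field's cadence through the two configuration types above. Assumes a
+# ProjectV2 already in hand and a field literally named "Iteration" (the
+# name is a placeholder):
+#
+# field(name: "Iteration") {
+#   ... on ProjectV2IterationField {
+#     configuration {
+#       duration
+#       startDay
+#       iterations { id title startDate }
+#       completedIterations { id title }
+#     }
+#   }
+# }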
+
+"""
+Ways in which lists of projects can be ordered upon return.
+"""
+input ProjectV2Order {
+  """
+  The direction in which to order projects by the specified field.
+  """
+  direction: OrderDirection!
+
+  """
+  The field by which to order projects.
+  """
+  field: ProjectV2OrderField!
+}
+
+"""
+Properties by which projects can be ordered.
+"""
+enum ProjectV2OrderField {
+  """
+  The project's date and time of creation
+  """
+  CREATED_AT
+
+  """
+  The project's number
+  """
+  NUMBER
+
+  """
+  The project's title
+  """
+  TITLE
+
+  """
+  The project's date and time of update
+  """
+  UPDATED_AT
+}
+
+"""
+Represents an owner of a project (beta).
+"""
+interface ProjectV2Owner {
+  """
+  The Node ID of the ProjectV2Owner object
+  """
+  id: ID!
+
+  """
+  Find a project by number.
+  """
+  projectV2(
+    """
+    The project number.
+    """
+    number: Int!
+  ): ProjectV2
+
+  """
+  A list of projects under the owner.
+  """
+  projectsV2(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    How to order the returned projects.
+    """
+    orderBy: ProjectV2Order = {field: NUMBER, direction: DESC}
+
+    """
+    A project to search for under the owner.
+    """
+    query: String
+  ): ProjectV2Connection!
+}
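+
+# Editorial sketch (not part of the upstream schema): Organization and User
+# both implement ProjectV2Owner, so lookup by number and search by title read
+# the same on either. Assumes the `user` query root defined elsewhere in this
+# schema.
+#
+# query {
+#   user(login: "octocat") {
+#     projectV2(number: 7) { title url }
+#     projectsV2(first: 5, query: "roadmap") {
+#       nodes { number title }
+#     }
+#   }
+# }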
+
+"""
+Recent projects for the owner.
+"""
+interface ProjectV2Recent {
+  """
+  Recent projects that this user has modified in the context of the owner.
+  """
+  recentProjects(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectV2Connection!
+}
+
+"""
+The possible roles of a collaborator on a project.
+"""
+enum ProjectV2Roles {
+  """
+  The collaborator can view, edit, and manage the settings of the project
+  """
+  ADMIN
+
+  """
+  The collaborator has no direct access to the project
+  """
+  NONE
+
+  """
+  The collaborator can view the project
+  """
+  READER
+
+  """
+  The collaborator can view and edit the project
+  """
+  WRITER
+}
+
+"""
+A single select field inside a project.
+"""
+type ProjectV2SingleSelectField implements Node & ProjectV2FieldCommon {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The field's type.
+  """
+  dataType: ProjectV2FieldType!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the ProjectV2SingleSelectField object
+  """
+  id: ID!
+
+  """
+  The project field's name.
+  """
+  name: String!
+
+  """
+  Options for the single select field
+  """
+  options(
+    """
+    Filter returned options to only those matching these names, case insensitive.
+    """
+    names: [String!]
+  ): [ProjectV2SingleSelectFieldOption!]!
+
+  """
+  The project that contains this field.
+  """
+  project: ProjectV2!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
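+# An illustrative query, not part of the upstream schema: reading a single
+# select field's options, filtered by name (case insensitive). It assumes the
+# `node` root field defined elsewhere in this schema; the node ID and option
+# names are placeholders.
+#
+#   query {
+#     node(id: "FIELD_ID") {
+#       ... on ProjectV2SingleSelectField {
+#         name
+#         dataType
+#         options(names: ["Todo", "Done"]) {
+#           id
+#           name
+#           color
+#           description
+#         }
+#       }
+#     }
+#   }
+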
+"""
+Single select field option for a configuration for a project.
+"""
+type ProjectV2SingleSelectFieldOption {
+  """
+  The option's display color.
+  """
+  color: ProjectV2SingleSelectFieldOptionColor!
+
+  """
+  The option's plain-text description.
+  """
+  description: String!
+
+  """
+  The option's description, possibly containing HTML.
+  """
+  descriptionHTML: String!
+
+  """
+  The option's ID.
+  """
+  id: String!
+
+  """
+  The option's name.
+  """
+  name: String!
+
+  """
+  The option's HTML name.
+  """
+  nameHTML: String!
+}
+
+"""
+The display color of a single-select field option.
+"""
+enum ProjectV2SingleSelectFieldOptionColor {
+  """
+  BLUE
+  """
+  BLUE
+
+  """
+  GRAY
+  """
+  GRAY
+
+  """
+  GREEN
+  """
+  GREEN
+
+  """
+  ORANGE
+  """
+  ORANGE
+
+  """
+  PINK
+  """
+  PINK
+
+  """
+  PURPLE
+  """
+  PURPLE
+
+  """
+  RED
+  """
+  RED
+
+  """
+  YELLOW
+  """
+  YELLOW
+}
+
+"""
+Represents a single select field option
+"""
+input ProjectV2SingleSelectFieldOptionInput {
+  """
+  The display color of the option
+  """
+  color: ProjectV2SingleSelectFieldOptionColor!
+
+  """
+  The description text of the option
+  """
+  description: String!
+
+  """
+  The name of the option
+  """
+  name: String!
+}
+
+"""
+Represents a sort by field and direction.
+"""
+type ProjectV2SortBy {
+  """
+  The direction of the sorting. Possible values are ASC and DESC.
+  """
+  direction: OrderDirection!
+
+  """
+  The field by which items are sorted.
+  """
+  field: ProjectV2Field!
+}
+
+"""
+The connection type for ProjectV2SortBy.
+"""
+type ProjectV2SortByConnection {
+  """
+  A list of edges.
+  """
+  edges: [ProjectV2SortByEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ProjectV2SortBy]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ProjectV2SortByEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ProjectV2SortBy
+}
+
+"""
+Represents a sort by field and direction.
+"""
+type ProjectV2SortByField {
+  """
+  The direction of the sorting. Possible values are ASC and DESC.
+  """
+  direction: OrderDirection!
+
+  """
+  The field by which items are sorted.
+  """
+  field: ProjectV2FieldConfiguration!
+}
+
+"""
+The connection type for ProjectV2SortByField.
+"""
+type ProjectV2SortByFieldConnection {
+  """
+  A list of edges.
+  """
+  edges: [ProjectV2SortByFieldEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ProjectV2SortByField]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ProjectV2SortByFieldEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ProjectV2SortByField
+}
+
+"""
+The possible states of a project v2.
+"""
+enum ProjectV2State {
+  """
+  A project v2 that has been closed
+  """
+  CLOSED
+
+  """
+  A project v2 that is still open
+  """
+  OPEN
+}
+
+"""
+A view within a ProjectV2.
+"""
+type ProjectV2View implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The view's visible fields.
+  """
+  fields(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the project v2 fields returned from the connection.
+    """
+    orderBy: ProjectV2FieldOrder = {field: POSITION, direction: ASC}
+  ): ProjectV2FieldConfigurationConnection
+
+  """
+  The project view's filter.
+  """
+  filter: String
+
+  """
+  The view's group-by field.
+  """
+  groupBy(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the project v2 fields returned from the connection.
+    """
+    orderBy: ProjectV2FieldOrder = {field: POSITION, direction: ASC}
+  ): ProjectV2FieldConnection
+    @deprecated(
+      reason: "The `ProjectV2View#order_by` API is deprecated in favour of the more capable `ProjectV2View#group_by_field` API. Check out the `ProjectV2View#group_by_fields` API as an example for the more capable alternative. Removal on 2023-04-01 UTC."
+    )
+
+  """
+  The view's group-by fields.
+  """
+  groupByFields(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the project v2 fields returned from the connection.
+    """
+    orderBy: ProjectV2FieldOrder = {field: POSITION, direction: ASC}
+  ): ProjectV2FieldConfigurationConnection
+
+  """
+  The Node ID of the ProjectV2View object
+  """
+  id: ID!
+
+  """
+  The project view's layout.
+  """
+  layout: ProjectV2ViewLayout!
+
+  """
+  The project view's name.
+  """
+  name: String!
+
+  """
+  The project view's number.
+  """
+  number: Int!
+
+  """
+  The project that contains this view.
+  """
+  project: ProjectV2!
+
+  """
+  The view's sort-by config.
+  """
+  sortBy(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectV2SortByConnection
+    @deprecated(
+      reason: "The `ProjectV2View#sort_by` API is deprecated in favour of the more capable `ProjectV2View#sort_by_fields` API. Check out the `ProjectV2View#sort_by_fields` API as an example for the more capable alternative. Removal on 2023-04-01 UTC."
+    )
+
+  """
+  The view's sort-by config.
+  """
+  sortByFields(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectV2SortByFieldConnection
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The view's vertical-group-by field.
+  """
+  verticalGroupBy(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the project v2 fields returned from the connection.
+    """
+    orderBy: ProjectV2FieldOrder = {field: POSITION, direction: ASC}
+  ): ProjectV2FieldConnection
+    @deprecated(
+      reason: "The `ProjectV2View#vertical_group_by` API is deprecated in favour of the more capable `ProjectV2View#vertical_group_by_fields` API. Check out the `ProjectV2View#vertical_group_by_fields` API as an example for the more capable alternative. Removal on 2023-04-01 UTC."
+    )
+
+  """
+  The view's vertical-group-by fields.
+  """
+  verticalGroupByFields(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the project v2 fields returned from the connection.
+    """
+    orderBy: ProjectV2FieldOrder = {field: POSITION, direction: ASC}
+  ): ProjectV2FieldConfigurationConnection
+
+  """
+  The view's visible fields.
+  """
+  visibleFields(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the project v2 fields returned from the connection.
+    """
+    orderBy: ProjectV2FieldOrder = {field: POSITION, direction: ASC}
+  ): ProjectV2FieldConnection
+    @deprecated(
+      reason: "The `ProjectV2View#visibleFields` API is deprecated in favour of the more capable `ProjectV2View#fields` API. Check out the `ProjectV2View#fields` API as an example for the more capable alternative. Removal on 2023-01-01 UTC."
+    )
+}
+
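+# An illustrative query, not part of the upstream schema: inspecting a view
+# through its non-deprecated connections (`fields` and `sortByFields` rather
+# than `visibleFields` and `sortBy`). It assumes the `node` root field defined
+# elsewhere in this schema; the node ID is a placeholder.
+#
+#   query {
+#     node(id: "VIEW_ID") {
+#       ... on ProjectV2View {
+#         name
+#         layout
+#         filter
+#         fields(first: 10) {
+#           nodes {
+#             ... on ProjectV2FieldCommon {
+#               name
+#               dataType
+#             }
+#           }
+#         }
+#         sortByFields(first: 5) {
+#           nodes {
+#             direction
+#           }
+#         }
+#       }
+#     }
+#   }
+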
+"""
+The connection type for ProjectV2View.
+"""
+type ProjectV2ViewConnection {
+  """
+  A list of edges.
+  """
+  edges: [ProjectV2ViewEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ProjectV2View]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ProjectV2ViewEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ProjectV2View
+}
+
+"""
+The layout of a project v2 view.
+"""
+enum ProjectV2ViewLayout {
+  """
+  Board layout
+  """
+  BOARD_LAYOUT
+
+  """
+  Roadmap layout
+  """
+  ROADMAP_LAYOUT
+
+  """
+  Table layout
+  """
+  TABLE_LAYOUT
+}
+
+"""
+Ordering options for project v2 view connections
+"""
+input ProjectV2ViewOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order the project v2 views by.
+  """
+  field: ProjectV2ViewOrderField!
+}
+
+"""
+Properties by which project v2 view connections can be ordered.
+"""
+enum ProjectV2ViewOrderField {
+  """
+  Order project v2 views by creation time
+  """
+  CREATED_AT
+
+  """
+  Order project v2 views by name
+  """
+  NAME
+
+  """
+  Order project v2 views by position
+  """
+  POSITION
+}
+
+"""
+A workflow inside a project.
+"""
+type ProjectV2Workflow implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  Whether the workflow is enabled.
+  """
+  enabled: Boolean!
+
+  """
+  The Node ID of the ProjectV2Workflow object
+  """
+  id: ID!
+
+  """
+  The name of the workflow.
+  """
+  name: String!
+
+  """
+  The number of the workflow.
+  """
+  number: Int!
+
+  """
+  The project that contains this workflow.
+  """
+  project: ProjectV2!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The connection type for ProjectV2Workflow.
+"""
+type ProjectV2WorkflowConnection {
+  """
+  A list of edges.
+  """
+  edges: [ProjectV2WorkflowEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ProjectV2Workflow]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ProjectV2WorkflowEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ProjectV2Workflow
+}
+
+"""
+Ordering options for project v2 workflows connections
+"""
+input ProjectV2WorkflowOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order the project v2 workflows by.
+  """
+  field: ProjectV2WorkflowsOrderField!
+}
+
+"""
+Properties by which project workflows can be ordered.
+"""
+enum ProjectV2WorkflowsOrderField {
+  """
+  The date and time of the workflow creation
+  """
+  CREATED_AT
+
+  """
+  The name of the workflow
+  """
+  NAME
+
+  """
+  The number of the workflow
+  """
+  NUMBER
+
+  """
+  The date and time of the workflow update
+  """
+  UPDATED_AT
+}
+
+"""
+A user's public key.
+"""
+type PublicKey implements Node {
+  """
+  The last time this key was used to perform an action. Values will be null for keys not owned by the user.
+  """
+  accessedAt: DateTime
+
+  """
+  Identifies the date and time when the key was created. Keys created before
+  March 5th, 2014 have inaccurate values. Values will be null for keys not owned by the user.
+  """
+  createdAt: DateTime
+
+  """
+  The fingerprint for this PublicKey.
+  """
+  fingerprint: String!
+
+  """
+  The Node ID of the PublicKey object
+  """
+  id: ID!
+
+  """
+  Whether this PublicKey is read-only or not. Values will be null for keys not owned by the user.
+  """
+  isReadOnly: Boolean
+
+  """
+  The public key string.
+  """
+  key: String!
+
+  """
+  Identifies the date and time when the key was updated. Keys created before
+  March 5th, 2014 may have inaccurate values. Values will be null for keys not
+  owned by the user.
+  """
+  updatedAt: DateTime
+}
+
+"""
+The connection type for PublicKey.
+"""
+type PublicKeyConnection {
+  """
+  A list of edges.
+  """
+  edges: [PublicKeyEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [PublicKey]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type PublicKeyEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PublicKey
+}
+
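+# An illustrative query, not part of the upstream schema: paginating a user's
+# public keys. It assumes a `user` root field and that `User` (defined
+# elsewhere in this schema) exposes a `publicKeys` connection returning
+# PublicKeyConnection; the login is a placeholder. Note that `accessedAt` and
+# `isReadOnly` are null for keys the viewer does not own.
+#
+#   query {
+#     user(login: "octocat") {
+#       publicKeys(first: 5) {
+#         totalCount
+#         nodes {
+#           fingerprint
+#           isReadOnly
+#           accessedAt
+#         }
+#       }
+#     }
+#   }
+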
+"""
+Autogenerated input type of PublishSponsorsTier
+"""
+input PublishSponsorsTierInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the draft tier to publish.
+  """
+  tierId: ID! @possibleTypes(concreteTypes: ["SponsorsTier"])
+}
+
+"""
+Autogenerated return type of PublishSponsorsTier
+"""
+type PublishSponsorsTierPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The tier that was published.
+  """
+  sponsorsTier: SponsorsTier
+}
+
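+# An illustrative mutation, not part of the upstream schema: publishing a draft
+# sponsors tier. It assumes the Mutation root (defined elsewhere in this
+# schema) exposes `publishSponsorsTier(input: PublishSponsorsTierInput!):
+# PublishSponsorsTierPayload` and that `SponsorsTier` exposes `id`; the tier ID
+# is a placeholder.
+#
+#   mutation {
+#     publishSponsorsTier(input: {tierId: "TIER_ID"}) {
+#       clientMutationId
+#       sponsorsTier {
+#         id
+#       }
+#     }
+#   }
+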
+"""
+A repository pull request.
+"""
+type PullRequest implements Assignable & Closable & Comment & Labelable & Lockable & Node & ProjectV2Owner & Reactable & RepositoryNode & Subscribable & UniformResourceLocatable & Updatable & UpdatableComment {
+  """
+  Reason that the conversation was locked.
+  """
+  activeLockReason: LockReason
+
+  """
+  The number of additions in this pull request.
+  """
+  additions: Int!
+
+  """
+  A list of Users assigned to this object.
+  """
+  assignees(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserConnection!
+
+  """
+  The actor who authored the comment.
+  """
+  author: Actor
+
+  """
+  Author's association with the subject of the comment.
+  """
+  authorAssociation: CommentAuthorAssociation!
+
+  """
+  Returns the auto-merge request object if one exists for this pull request.
+  """
+  autoMergeRequest: AutoMergeRequest
+
+  """
+  Identifies the base Ref associated with the pull request.
+  """
+  baseRef: Ref
+
+  """
+  Identifies the name of the base Ref associated with the pull request, even if the ref has been deleted.
+  """
+  baseRefName: String!
+
+  """
+  Identifies the oid of the base ref associated with the pull request, even if the ref has been deleted.
+  """
+  baseRefOid: GitObjectID!
+
+  """
+  The repository associated with this pull request's base Ref.
+  """
+  baseRepository: Repository
+
+  """
+  The body as Markdown.
+  """
+  body: String!
+
+  """
+  The body rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  The body rendered to text.
+  """
+  bodyText: String!
+
+  """
+  Whether or not the pull request is rebaseable.
+  """
+  canBeRebased: Boolean! @preview(toggledBy: "merge-info-preview")
+
+  """
+  The number of changed files in this pull request.
+  """
+  changedFiles: Int!
+
+  """
+  The HTTP path for the checks of this pull request.
+  """
+  checksResourcePath: URI!
+
+  """
+  The HTTP URL for the checks of this pull request.
+  """
+  checksUrl: URI!
+
+  """
+  `true` if the pull request is closed
+  """
+  closed: Boolean!
+
+  """
+  Identifies the date and time when the object was closed.
+  """
+  closedAt: DateTime
+
+  """
+  List of issues that may be closed by this pull request
+  """
+  closingIssuesReferences(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for issues returned from the connection
+    """
+    orderBy: IssueOrder
+
+    """
+    Return only manually linked Issues
+    """
+    userLinkedOnly: Boolean = false
+  ): IssueConnection
+
+  """
+  A list of comments associated with the pull request.
+  """
+  comments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for issue comments returned from the connection.
+    """
+    orderBy: IssueCommentOrder
+  ): IssueCommentConnection!
+
+  """
+  A list of commits present in this pull request's head branch not present in the base branch.
+  """
+  commits(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): PullRequestCommitConnection!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Check if this comment was created via an email reply.
+  """
+  createdViaEmail: Boolean!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The number of deletions in this pull request.
+  """
+  deletions: Int!
+
+  """
+  The actor who edited this pull request's body.
+  """
+  editor: Actor
+
+  """
+  Lists the files changed within this pull request.
+  """
+  files(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): PullRequestChangedFileConnection
+
+  """
+  Identifies the head Ref associated with the pull request.
+  """
+  headRef: Ref
+
+  """
+  Identifies the name of the head Ref associated with the pull request, even if the ref has been deleted.
+  """
+  headRefName: String!
+
+  """
+  Identifies the oid of the head ref associated with the pull request, even if the ref has been deleted.
+  """
+  headRefOid: GitObjectID!
+
+  """
+  The repository associated with this pull request's head Ref.
+  """
+  headRepository: Repository
+
+  """
+  The owner of the repository associated with this pull request's head Ref.
+  """
+  headRepositoryOwner: RepositoryOwner
+
+  """
+  The hovercard information for this pull request
+  """
+  hovercard(
+    """
+    Whether or not to include notification contexts
+    """
+    includeNotificationContexts: Boolean = true
+  ): Hovercard!
+
+  """
+  The Node ID of the PullRequest object
+  """
+  id: ID!
+
+  """
+  Check if this comment was edited and includes an edit with the creation data
+  """
+  includesCreatedEdit: Boolean!
+
+  """
+  The head and base repositories are different.
+  """
+  isCrossRepository: Boolean!
+
+  """
+  Identifies if the pull request is a draft.
+  """
+  isDraft: Boolean!
+
+  """
+  Is this pull request read by the viewer
+  """
+  isReadByViewer: Boolean
+
+  """
+  A list of labels associated with the object.
+  """
+  labels(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for labels returned from the connection.
+    """
+    orderBy: LabelOrder = {field: CREATED_AT, direction: ASC}
+  ): LabelConnection
+
+  """
+  The moment the editor made the last edit
+  """
+  lastEditedAt: DateTime
+
+  """
+  A list of latest reviews per user associated with the pull request.
+  """
+  latestOpinionatedReviews(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Only return reviews from users who have write access to the repository
+    """
+    writersOnly: Boolean = false
+  ): PullRequestReviewConnection
+
+  """
+  A list of latest reviews per user associated with the pull request that are not also pending review.
+  """
+  latestReviews(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): PullRequestReviewConnection
+
+  """
+  `true` if the pull request is locked
+  """
+  locked: Boolean!
+
+  """
+  Indicates whether maintainers can modify the pull request.
+  """
+  maintainerCanModify: Boolean!
+
+  """
+  The commit that was created when this pull request was merged.
+  """
+  mergeCommit: Commit
+
+  """
+  The merge queue entry of the pull request in the base branch's merge queue
+  """
+  mergeQueueEntry: MergeQueueEntry
+
+  """
+  Detailed information about the current pull request merge state status.
+  """
+  mergeStateStatus: MergeStateStatus! @preview(toggledBy: "merge-info-preview")
+
+  """
+  Whether or not the pull request can be merged based on the existence of merge conflicts.
+  """
+  mergeable: MergeableState!
+
+  """
+  Whether or not the pull request was merged.
+  """
+  merged: Boolean!
+
+  """
+  The date and time that the pull request was merged.
+  """
+  mergedAt: DateTime
+
+  """
+  The actor who merged the pull request.
+  """
+  mergedBy: Actor
+
+  """
+  Identifies the milestone associated with the pull request.
+  """
+  milestone: Milestone
+
+  """
+  Identifies the pull request number.
+  """
+  number: Int!
+
+  """
+  A list of Users that are participating in the Pull Request conversation.
+  """
+  participants(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserConnection!
+
+  """
+  The permalink to the pull request.
+  """
+  permalink: URI!
+
+  """
+  The commit that GitHub automatically generated to test if this pull request
+  could be merged. This field will not return a value if the pull request is
+  merged, or if the test merge commit is still being generated. See the
+  `mergeable` field for more details on the mergeability of the pull request.
+  """
+  potentialMergeCommit: Commit
+
+  """
+  List of project cards associated with this pull request.
+  """
+  projectCards(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    A list of archived states to filter the cards by
+    """
+    archivedStates: [ProjectCardArchivedState] = [ARCHIVED, NOT_ARCHIVED]
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectCardConnection!
+
+  """
+  List of project items associated with this pull request.
+  """
+  projectItems(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Include archived items.
+    """
+    includeArchived: Boolean = true
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectV2ItemConnection!
+
+  """
+  Find a project by number.
+  """
+  projectV2(
+    """
+    The project number.
+    """
+    number: Int!
+  ): ProjectV2
+
+  """
+  A list of projects under the owner.
+  """
+  projectsV2(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    How to order the returned projects.
+    """
+    orderBy: ProjectV2Order = {field: NUMBER, direction: DESC}
+
+    """
+    A project to search for under the owner.
+    """
+    query: String
+  ): ProjectV2Connection!
+
+  """
+  Identifies when the comment was published.
+  """
+  publishedAt: DateTime
+
+  """
+  A list of reactions grouped by content left on the subject.
+  """
+  reactionGroups: [ReactionGroup!]
+
+  """
+  A list of Reactions left on the subject.
+  """
+  reactions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Allows filtering Reactions by emoji.
+    """
+    content: ReactionContent
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Allows specifying the order in which reactions are returned.
+    """
+    orderBy: ReactionOrder
+  ): ReactionConnection!
+
+  """
+  The repository associated with this node.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path for this pull request.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP path for reverting this pull request.
+  """
+  revertResourcePath: URI!
+
+  """
+  The HTTP URL for reverting this pull request.
+  """
+  revertUrl: URI!
+
+  """
+  The current status of this pull request with respect to code review.
+  """
+  reviewDecision: PullRequestReviewDecision
+
+  """
+  A list of review requests associated with the pull request.
+  """
+  reviewRequests(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ReviewRequestConnection
+
+  """
+  The list of all review threads for this pull request.
+  """
+  reviewThreads(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): PullRequestReviewThreadConnection!
+
+  """
+  A list of reviews associated with the pull request.
+  """
+  reviews(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Filter by author of the review.
+    """
+    author: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    A list of states to filter the reviews.
+    """
+    states: [PullRequestReviewState!]
+  ): PullRequestReviewConnection
+
+  """
+  Identifies the state of the pull request.
+  """
+  state: PullRequestState!
+
+  """
+  A list of reviewer suggestions based on commit history and past review comments.
+  """
+  suggestedReviewers: [SuggestedReviewer]!
+
+  """
+  A list of events, comments, commits, etc. associated with the pull request.
+  """
+  timeline(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Allows filtering timeline events by a `since` timestamp.
+    """
+    since: DateTime
+  ): PullRequestTimelineConnection!
+    @deprecated(reason: "`timeline` will be removed Use PullRequest.timelineItems instead. Removal on 2020-10-01 UTC.")
+
+  """
+  A list of events, comments, commits, etc. associated with the pull request.
+  """
+  timelineItems(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Filter timeline items by type.
+    """
+    itemTypes: [PullRequestTimelineItemsItemType!]
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter timeline items by a `since` timestamp.
+    """
+    since: DateTime
+
+    """
+    Skips the first _n_ elements in the list.
+    """
+    skip: Int
+  ): PullRequestTimelineItemsConnection!
+
+  """
+  Identifies the pull request title.
+  """
+  title: String!
+
+  """
+  Identifies the pull request title rendered to HTML.
+  """
+  titleHTML: HTML!
+
+  """
+  Returns a count of how many comments this pull request has received.
+  """
+  totalCommentsCount: Int
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this pull request.
+  """
+  url: URI!
+
+  """
+  A list of edits to this content.
+  """
+  userContentEdits(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserContentEditConnection
+
+  """
+  Whether or not the viewer can apply suggestions.
+  """
+  viewerCanApplySuggestion: Boolean!
+
+  """
+  Indicates if the object can be closed by the viewer.
+  """
+  viewerCanClose: Boolean!
+
+  """
+  Check if the viewer can restore the deleted head ref.
+  """
+  viewerCanDeleteHeadRef: Boolean!
+
+  """
+  Whether or not the viewer can disable auto-merge
+  """
+  viewerCanDisableAutoMerge: Boolean!
+
+  """
+  Can the viewer edit files within this pull request.
+  """
+  viewerCanEditFiles: Boolean!
+
+  """
+  Whether or not the viewer can enable auto-merge
+  """
+  viewerCanEnableAutoMerge: Boolean!
+
+  """
+  Indicates whether the viewer can bypass branch protections and merge the pull request immediately
+  """
+  viewerCanMergeAsAdmin: Boolean!
+
+  """
+  Can the user react to this subject
+  """
+  viewerCanReact: Boolean!
+
+  """
+  Indicates if the object can be reopened by the viewer.
+  """
+  viewerCanReopen: Boolean!
+
+  """
+  Check if the viewer is able to change their subscription status for the repository.
+  """
+  viewerCanSubscribe: Boolean!
+
+  """
+  Check if the current viewer can update this object.
+  """
+  viewerCanUpdate: Boolean!
+
+  """
+  Whether or not the viewer can update the head ref of this PR by merging or rebasing the base ref.
+  If the head ref is up to date or cannot be updated by this user, this will return false.
+  """
+  viewerCanUpdateBranch: Boolean!
+
+  """
+  Reasons why the current viewer can not update this comment.
+  """
+  viewerCannotUpdateReasons: [CommentCannotUpdateReason!]!
+
+  """
+  Did the viewer author this comment.
+  """
+  viewerDidAuthor: Boolean!
+
+  """
+  The latest review given from the viewer.
+  """
+  viewerLatestReview: PullRequestReview
+
+  """
+  The person who has requested the viewer for review on this pull request.
+  """
+  viewerLatestReviewRequest: ReviewRequest
+
+  """
+  The merge body text for the viewer and method.
+  """
+  viewerMergeBodyText(
+    """
+    The merge method for the message.
+    """
+    mergeType: PullRequestMergeMethod
+  ): String!
+
+  """
+  The merge headline text for the viewer and method.
+  """
+  viewerMergeHeadlineText(
+    """
+    The merge method for the message.
+    """
+    mergeType: PullRequestMergeMethod
+  ): String!
+
+  """
+  Identifies if the viewer is watching, not watching, or ignoring the subscribable entity.
+  """
+  viewerSubscription: SubscriptionState
+}
+
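+# An illustrative query, not part of the upstream schema: fetching a pull
+# request together with the issues it may close and the latest reviews. It
+# assumes the `repository` root field and `Repository.pullRequest(number:)`
+# defined elsewhere in this schema, and that `Issue` exposes `number` and
+# `title`; owner, name, and number are placeholders.
+#
+#   query {
+#     repository(owner: "octo-org", name: "octo-repo") {
+#       pullRequest(number: 42) {
+#         title
+#         state
+#         isDraft
+#         mergeable
+#         closingIssuesReferences(first: 5) {
+#           nodes {
+#             number
+#             title
+#           }
+#         }
+#         latestReviews(first: 5) {
+#           nodes {
+#             state
+#             author {
+#               login
+#             }
+#           }
+#         }
+#       }
+#     }
+#   }
+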
+"""
+The possible methods for updating a pull request's head branch with the base branch.
+"""
+enum PullRequestBranchUpdateMethod {
+  """
+  Update branch via merge
+  """
+  MERGE
+
+  """
+  Update branch via rebase
+  """
+  REBASE
+}
+
+"""
+A file changed in a pull request.
+"""
+type PullRequestChangedFile {
+  """
+  The number of additions to the file.
+  """
+  additions: Int!
+
+  """
+  How the file was changed in this PullRequest
+  """
+  changeType: PatchStatus!
+
+  """
+  The number of deletions to the file.
+  """
+  deletions: Int!
+
+  """
+  The path of the file.
+  """
+  path: String!
+
+  """
+  The state of the file for the viewer.
+  """
+  viewerViewedState: FileViewedState!
+}
+
+"""
+The connection type for PullRequestChangedFile.
+"""
+type PullRequestChangedFileConnection {
+  """
+  A list of edges.
+  """
+  edges: [PullRequestChangedFileEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [PullRequestChangedFile]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type PullRequestChangedFileEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PullRequestChangedFile
+}
+
+"""
+Represents a Git commit part of a pull request.
+"""
+type PullRequestCommit implements Node & UniformResourceLocatable {
+  """
+  The Git commit object
+  """
+  commit: Commit!
+
+  """
+  The Node ID of the PullRequestCommit object
+  """
+  id: ID!
+
+  """
+  The pull request this commit belongs to
+  """
+  pullRequest: PullRequest!
+
+  """
+  The HTTP path for this pull request commit
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for this pull request commit
+  """
+  url: URI!
+}
+
+"""
+Represents a commit comment thread part of a pull request.
+"""
+type PullRequestCommitCommentThread implements Node & RepositoryNode {
+  """
+  The comments that exist in this thread.
+  """
+  comments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): CommitCommentConnection!
+
+  """
+  The commit the comments were made on.
+  """
+  commit: Commit!
+
+  """
+  The Node ID of the PullRequestCommitCommentThread object
+  """
+  id: ID!
+
+  """
+  The file the comments were made on.
+  """
+  path: String
+
+  """
+  The position in the diff for the commit that the comment was made on.
+  """
+  position: Int
+
+  """
+  The pull request this commit comment thread belongs to
+  """
+  pullRequest: PullRequest!
+
+  """
+  The repository associated with this node.
+  """
+  repository: Repository!
+}
+
+"""
+The connection type for PullRequestCommit.
+"""
+type PullRequestCommitConnection {
+  """
+  A list of edges.
+  """
+  edges: [PullRequestCommitEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [PullRequestCommit]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type PullRequestCommitEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PullRequestCommit
+}
+
+"""
+The connection type for PullRequest.
+"""
+type PullRequestConnection {
+  """
+  A list of edges.
+  """
+  edges: [PullRequestEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [PullRequest]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+This aggregates pull requests opened by a user within one repository.
+"""
+type PullRequestContributionsByRepository {
+  """
+  The pull request contributions.
+  """
+  contributions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for contributions returned from the connection.
+    """
+    orderBy: ContributionOrder = {direction: DESC}
+  ): CreatedPullRequestContributionConnection!
+
+  """
+  The repository in which the pull requests were opened.
+  """
+  repository: Repository!
+}
+
+"""
+An edge in a connection.
+"""
+type PullRequestEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PullRequest
+}
+
+"""
+Represents available types of methods to use when merging a pull request.
+"""
+enum PullRequestMergeMethod {
+  """
+  Add all commits from the head branch to the base branch with a merge commit.
+  """
+  MERGE
+
+  """
+  Add all commits from the head branch onto the base branch individually.
+  """
+  REBASE
+
+  """
+  Combine all commits from the head branch into a single commit in the base branch.
+  """
+  SQUASH
+}
+
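+# An illustrative mutation, not part of the upstream schema: merging a pull
+# request with an explicit merge method. It assumes the Mutation root (defined
+# elsewhere in this schema) exposes `mergePullRequest`, whose input accepts a
+# `pullRequestId` and a `mergeMethod: PullRequestMergeMethod`; the ID is a
+# placeholder. SQUASH collapses the head branch's commits into a single commit
+# on the base branch.
+#
+#   mutation {
+#     mergePullRequest(input: {pullRequestId: "PR_ID", mergeMethod: SQUASH}) {
+#       pullRequest {
+#         merged
+#         mergedAt
+#       }
+#     }
+#   }
+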
+"""
+Ways in which lists of pull requests can be ordered upon return.
+"""
+input PullRequestOrder {
+  """
+  The direction in which to order pull requests by the specified field.
+  """
+  direction: OrderDirection!
+
+  """
+  The field by which to order pull requests.
+  """
+  field: PullRequestOrderField!
+}
+
+"""
+Properties by which pull request connections can be ordered.
+"""
+enum PullRequestOrderField {
+  """
+  Order pull requests by creation time
+  """
+  CREATED_AT
+
+  """
+  Order pull requests by update time
+  """
+  UPDATED_AT
+}
+
+"""
+Require all commits be made to a non-target branch and submitted via a pull request before they can be merged.
+"""
+type PullRequestParameters {
+  """
+  New, reviewable commits pushed will dismiss previous pull request review approvals.
+  """
+  dismissStaleReviewsOnPush: Boolean!
+
+  """
+  Require an approving review in pull requests that modify files that have a designated code owner.
+  """
+  requireCodeOwnerReview: Boolean!
+
+  """
+  Whether the most recent reviewable push must be approved by someone other than the person who pushed it.
+  """
+  requireLastPushApproval: Boolean!
+
+  """
+  The number of approving reviews that are required before a pull request can be merged.
+  """
+  requiredApprovingReviewCount: Int!
+
+  """
+  All conversations on code must be resolved before a pull request can be merged.
+  """
+  requiredReviewThreadResolution: Boolean!
+}
+
+"""
+Require all commits be made to a non-target branch and submitted via a pull request before they can be merged.
+"""
+input PullRequestParametersInput {
+  """
+  New, reviewable commits pushed will dismiss previous pull request review approvals.
+  """
+  dismissStaleReviewsOnPush: Boolean!
+
+  """
+  Require an approving review in pull requests that modify files that have a designated code owner.
+  """
+  requireCodeOwnerReview: Boolean!
+
+  """
+  Whether the most recent reviewable push must be approved by someone other than the person who pushed it.
+  """
+  requireLastPushApproval: Boolean!
+
+  """
+  The number of approving reviews that are required before a pull request can be merged.
+  """
+  requiredApprovingReviewCount: Int!
+
+  """
+  All conversations on code must be resolved before a pull request can be merged.
+  """
+  requiredReviewThreadResolution: Boolean!
+}
+
+"""
+A review object for a given pull request.
+"""
+type PullRequestReview implements Comment & Deletable & Minimizable & Node & Reactable & RepositoryNode & Updatable & UpdatableComment {
+  """
+  The actor who authored the comment.
+  """
+  author: Actor
+
+  """
+  Author's association with the subject of the comment.
+  """
+  authorAssociation: CommentAuthorAssociation!
+
+  """
+  Indicates whether the author of this review has push access to the repository.
+  """
+  authorCanPushToRepository: Boolean!
+
+  """
+  Identifies the pull request review body.
+  """
+  body: String!
+
+  """
+  The body rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  The body of this review rendered as plain text.
+  """
+  bodyText: String!
+
+  """
+  A list of review comments for the current pull request review.
+  """
+  comments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): PullRequestReviewCommentConnection!
+
+  """
+  Identifies the commit associated with this pull request review.
+  """
+  commit: Commit
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Check if this comment was created via an email reply.
+  """
+  createdViaEmail: Boolean!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The actor who edited the comment.
+  """
+  editor: Actor
+
+  """
+  The Node ID of the PullRequestReview object
+  """
+  id: ID!
+
+  """
+  Check if this comment was edited and includes an edit with the creation data
+  """
+  includesCreatedEdit: Boolean!
+
+  """
+  Returns whether or not a comment has been minimized.
+  """
+  isMinimized: Boolean!
+
+  """
+  The moment the editor made the last edit
+  """
+  lastEditedAt: DateTime
+
+  """
+  Returns why the comment was minimized. One of `abuse`, `off-topic`,
+  `outdated`, `resolved`, `duplicate`, and `spam`. Note that the case and
+  formatting of these values differ from the inputs to the `MinimizeComment` mutation.
+  """
+  minimizedReason: String
+
+  """
+  A list of teams that this review was made on behalf of.
+  """
+  onBehalfOf(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): TeamConnection!
+
+  """
+  Identifies when the comment was published.
+  """
+  publishedAt: DateTime
+
+  """
+  Identifies the pull request associated with this pull request review.
+  """
+  pullRequest: PullRequest!
+
+  """
+  A list of reactions grouped by content left on the subject.
+  """
+  reactionGroups: [ReactionGroup!]
+
+  """
+  A list of Reactions left on the subject.
+  """
+  reactions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Allows filtering Reactions by emoji.
+    """
+    content: ReactionContent
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Allows specifying the order in which reactions are returned.
+    """
+    orderBy: ReactionOrder
+  ): ReactionConnection!
+
+  """
+  The repository associated with this node.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path permalink for this PullRequestReview.
+  """
+  resourcePath: URI!
+
+  """
+  Identifies the current state of the pull request review.
+  """
+  state: PullRequestReviewState!
+
+  """
+  Identifies when the pull request review was submitted
+  """
+  submittedAt: DateTime
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL permalink for this PullRequestReview.
+  """
+  url: URI!
+
+  """
+  A list of edits to this content.
+  """
+  userContentEdits(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserContentEditConnection
+
+  """
+  Check if the current viewer can delete this object.
+  """
+  viewerCanDelete: Boolean!
+
+  """
+  Check if the current viewer can minimize this object.
+  """
+  viewerCanMinimize: Boolean!
+
+  """
+  Can the user react to this subject
+  """
+  viewerCanReact: Boolean!
+
+  """
+  Check if the current viewer can update this object.
+  """
+  viewerCanUpdate: Boolean!
+
+  """
+  Reasons why the current viewer can not update this comment.
+  """
+  viewerCannotUpdateReasons: [CommentCannotUpdateReason!]!
+
+  """
+  Did the viewer author this comment.
+  """
+  viewerDidAuthor: Boolean!
+}
+
+"""
+A review comment associated with a given repository pull request.
+"""
+type PullRequestReviewComment implements Comment & Deletable & Minimizable & Node & Reactable & RepositoryNode & Updatable & UpdatableComment {
+  """
+  The actor who authored the comment.
+  """
+  author: Actor
+
+  """
+  Author's association with the subject of the comment.
+  """
+  authorAssociation: CommentAuthorAssociation!
+
+  """
+  The comment body of this review comment.
+  """
+  body: String!
+
+  """
+  The body rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  The comment body of this review comment rendered as plain text.
+  """
+  bodyText: String!
+
+  """
+  Identifies the commit associated with the comment.
+  """
+  commit: Commit
+
+  """
+  Identifies when the comment was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Check if this comment was created via an email reply.
+  """
+  createdViaEmail: Boolean!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The diff hunk to which the comment applies.
+  """
+  diffHunk: String!
+
+  """
+  Identifies when the comment was created in a draft state.
+  """
+  draftedAt: DateTime!
+
+  """
+  The actor who edited the comment.
+  """
+  editor: Actor
+
+  """
+  The Node ID of the PullRequestReviewComment object
+  """
+  id: ID!
+
+  """
+  Check if this comment was edited and includes an edit with the creation data
+  """
+  includesCreatedEdit: Boolean!
+
+  """
+  Returns whether or not a comment has been minimized.
+  """
+  isMinimized: Boolean!
+
+  """
+  The moment the editor made the last edit
+  """
+  lastEditedAt: DateTime
+
+  """
+  The end line number on the file to which the comment applies
+  """
+  line: Int
+
+  """
+  Returns why the comment was minimized. One of `abuse`, `off-topic`,
+  `outdated`, `resolved`, `duplicate`, and `spam`. Note that the case and
+  formatting of these values differ from the inputs to the `MinimizeComment` mutation.
+  """
+  minimizedReason: String
+
+  """
+  Identifies the original commit associated with the comment.
+  """
+  originalCommit: Commit
+
+  """
+  The end line number on the file to which the comment applied when it was first created
+  """
+  originalLine: Int
+
+  """
+  The original line index in the diff to which the comment applies.
+  """
+  originalPosition: Int!
+    @deprecated(reason: "We are phasing out diff-relative positioning for PR comments Removal on 2023-10-01 UTC.")
+
+  """
+  The start line number on the file to which the comment applied when it was first created
+  """
+  originalStartLine: Int
+
+  """
+  Whether the comment body is outdated
+  """
+  outdated: Boolean!
+
+  """
+  The path to which the comment applies.
+  """
+  path: String!
+
+  """
+  The line index in the diff to which the comment applies.
+  """
+  position: Int
+    @deprecated(
+      reason: "We are phasing out diff-relative positioning for PR comments Use the `line` and `startLine` fields instead, which are file line numbers instead of diff line numbers Removal on 2023-10-01 UTC."
+    )
+
+  """
+  Identifies when the comment was published.
+  """
+  publishedAt: DateTime
+
+  """
+  The pull request associated with this review comment.
+  """
+  pullRequest: PullRequest!
+
+  """
+  The pull request review associated with this review comment.
+  """
+  pullRequestReview: PullRequestReview
+
+  """
+  A list of reactions grouped by content left on the subject.
+  """
+  reactionGroups: [ReactionGroup!]
+
+  """
+  A list of Reactions left on the subject.
+  """
+  reactions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Allows filtering Reactions by emoji.
+    """
+    content: ReactionContent
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Allows specifying the order in which reactions are returned.
+    """
+    orderBy: ReactionOrder
+  ): ReactionConnection!
+
+  """
+  The comment this is a reply to.
+  """
+  replyTo: PullRequestReviewComment
+
+  """
+  The repository associated with this node.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path permalink for this review comment.
+  """
+  resourcePath: URI!
+
+  """
+  The start line number on the file to which the comment applies
+  """
+  startLine: Int
+
+  """
+  Identifies the state of the comment.
+  """
+  state: PullRequestReviewCommentState!
+
+  """
+  The level at which the comments in the corresponding thread are targeted: a diff line or a file
+  """
+  subjectType: PullRequestReviewThreadSubjectType!
+
+  """
+  Identifies when the comment was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL permalink for this review comment.
+  """
+  url: URI!
+
+  """
+  A list of edits to this content.
+  """
+  userContentEdits(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserContentEditConnection
+
+  """
+  Check if the current viewer can delete this object.
+  """
+  viewerCanDelete: Boolean!
+
+  """
+  Check if the current viewer can minimize this object.
+  """
+  viewerCanMinimize: Boolean!
+
+  """
+  Can the user react to this subject
+  """
+  viewerCanReact: Boolean!
+
+  """
+  Check if the current viewer can update this object.
+  """
+  viewerCanUpdate: Boolean!
+
+  """
+  Reasons why the current viewer can not update this comment.
+  """
+  viewerCannotUpdateReasons: [CommentCannotUpdateReason!]!
+
+  """
+  Did the viewer author this comment.
+  """
+  viewerDidAuthor: Boolean!
+}
+
+"""
+The connection type for PullRequestReviewComment.
+"""
+type PullRequestReviewCommentConnection {
+  """
+  A list of edges.
+  """
+  edges: [PullRequestReviewCommentEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [PullRequestReviewComment]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
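+# Example (sketch): cursor pagination over this connection. The node ID below
+# is a placeholder, and `PageInfo`'s `endCursor`/`hasNextPage` fields are
+# assumed from the standard Relay pattern (defined elsewhere in this schema);
+# feed `endCursor` back as `after` until `hasNextPage` is false.
+#
+#   query {
+#     node(id: "THREAD_NODE_ID") {
+#       ... on PullRequestReviewThread {
+#         comments(first: 50) {
+#           nodes { path line state }
+#           pageInfo { endCursor hasNextPage }
+#         }
+#       }
+#     }
+#   }
+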
+"""
+An edge in a connection.
+"""
+type PullRequestReviewCommentEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PullRequestReviewComment
+}
+
+"""
+The possible states of a pull request review comment.
+"""
+enum PullRequestReviewCommentState {
+  """
+  A comment that is part of a pending review
+  """
+  PENDING
+
+  """
+  A comment that is part of a submitted review
+  """
+  SUBMITTED
+}
+
+"""
+The connection type for PullRequestReview.
+"""
+type PullRequestReviewConnection {
+  """
+  A list of edges.
+  """
+  edges: [PullRequestReviewEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [PullRequestReview]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+This aggregates pull request reviews made by a user within one repository.
+"""
+type PullRequestReviewContributionsByRepository {
+  """
+  The pull request review contributions.
+  """
+  contributions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for contributions returned from the connection.
+    """
+    orderBy: ContributionOrder = {direction: DESC}
+  ): CreatedPullRequestReviewContributionConnection!
+
+  """
+  The repository in which the pull request reviews were made.
+  """
+  repository: Repository!
+}
+
+"""
+The review status of a pull request.
+"""
+enum PullRequestReviewDecision {
+  """
+  The pull request has received an approving review.
+  """
+  APPROVED
+
+  """
+  Changes have been requested on the pull request.
+  """
+  CHANGES_REQUESTED
+
+  """
+  A review is required before the pull request can be merged.
+  """
+  REVIEW_REQUIRED
+}
+
+"""
+An edge in a connection.
+"""
+type PullRequestReviewEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PullRequestReview
+}
+
+"""
+The possible events to perform on a pull request review.
+"""
+enum PullRequestReviewEvent {
+  """
+  Submit feedback and approve merging these changes.
+  """
+  APPROVE
+
+  """
+  Submit general feedback without explicit approval.
+  """
+  COMMENT
+
+  """
+  Dismiss the review so it no longer affects merging.
+  """
+  DISMISS
+
+  """
+  Submit feedback that must be addressed before merging.
+  """
+  REQUEST_CHANGES
+}
+
+"""
+The possible states of a pull request review.
+"""
+enum PullRequestReviewState {
+  """
+  A review allowing the pull request to merge.
+  """
+  APPROVED
+
+  """
+  A review blocking the pull request from merging.
+  """
+  CHANGES_REQUESTED
+
+  """
+  An informational review.
+  """
+  COMMENTED
+
+  """
+  A review that has been dismissed.
+  """
+  DISMISSED
+
+  """
+  A review that has not yet been submitted.
+  """
+  PENDING
+}
+
+"""
+A threaded list of comments for a given pull request.
+"""
+type PullRequestReviewThread implements Node {
+  """
+  A list of pull request comments associated with the thread.
+  """
+  comments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Skips the first _n_ elements in the list.
+    """
+    skip: Int
+  ): PullRequestReviewCommentConnection!
+
+  """
+  The side of the diff on which this thread was placed.
+  """
+  diffSide: DiffSide!
+
+  """
+  The Node ID of the PullRequestReviewThread object
+  """
+  id: ID!
+
+  """
+  Whether or not the thread has been collapsed (resolved)
+  """
+  isCollapsed: Boolean!
+
+  """
+  Indicates whether this thread was outdated by newer changes.
+  """
+  isOutdated: Boolean!
+
+  """
+  Whether this thread has been resolved
+  """
+  isResolved: Boolean!
+
+  """
+  The line in the file to which this thread refers
+  """
+  line: Int
+
+  """
+  The original line in the file to which this thread refers.
+  """
+  originalLine: Int
+
+  """
+  The original start line in the file to which this thread refers (multi-line only).
+  """
+  originalStartLine: Int
+
+  """
+  Identifies the file path of this thread.
+  """
+  path: String!
+
+  """
+  Identifies the pull request associated with this thread.
+  """
+  pullRequest: PullRequest!
+
+  """
+  Identifies the repository associated with this thread.
+  """
+  repository: Repository!
+
+  """
+  The user who resolved this thread
+  """
+  resolvedBy: User
+
+  """
+  The side of the diff that the first line of the thread starts on (multi-line only)
+  """
+  startDiffSide: DiffSide
+
+  """
+  The start line in the file to which this thread refers (multi-line only)
+  """
+  startLine: Int
+
+  """
+  The level at which the comments in the corresponding thread are targeted; this can be a diff line or a file.
+  """
+  subjectType: PullRequestReviewThreadSubjectType!
+
+  """
+  Indicates whether the current viewer can reply to this thread.
+  """
+  viewerCanReply: Boolean!
+
+  """
+  Whether or not the viewer can resolve this thread
+  """
+  viewerCanResolve: Boolean!
+
+  """
+  Whether or not the viewer can unresolve this thread
+  """
+  viewerCanUnresolve: Boolean!
+}
+
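+# Example (sketch): listing a pull request's review threads and their
+# resolution state. This assumes the `reviewThreads` connection on
+# `PullRequest` and the `login` field on `User`, both defined elsewhere in
+# this schema; owner, name, and number are placeholders.
+#
+#   query {
+#     repository(owner: "octocat", name: "hello-world") {
+#       pullRequest(number: 1) {
+#         reviewThreads(first: 20) {
+#           nodes { path line isResolved isOutdated resolvedBy { login } }
+#         }
+#       }
+#     }
+#   }
+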
+"""
+Review comment threads for a pull request review.
+"""
+type PullRequestReviewThreadConnection {
+  """
+  A list of edges.
+  """
+  edges: [PullRequestReviewThreadEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [PullRequestReviewThread]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type PullRequestReviewThreadEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PullRequestReviewThread
+}
+
+"""
+The possible subject types of a pull request review comment.
+"""
+enum PullRequestReviewThreadSubjectType {
+  """
+  A comment that has been made against the file of a pull request
+  """
+  FILE
+
+  """
+  A comment that has been made against the line of a pull request
+  """
+  LINE
+}
+
+"""
+Represents the latest point in the pull request timeline for which the viewer has seen the pull request's commits.
+"""
+type PullRequestRevisionMarker {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The last commit the viewer has seen.
+  """
+  lastSeenCommit: Commit!
+
+  """
+  The pull request to which the marker belongs.
+  """
+  pullRequest: PullRequest!
+}
+
+"""
+The possible states of a pull request.
+"""
+enum PullRequestState {
+  """
+  A pull request that has been closed without being merged.
+  """
+  CLOSED
+
+  """
+  A pull request that has been closed by being merged.
+  """
+  MERGED
+
+  """
+  A pull request that is still open.
+  """
+  OPEN
+}
+
+"""
+A repository pull request template.
+"""
+type PullRequestTemplate {
+  """
+  The body of the template
+  """
+  body: String
+
+  """
+  The filename of the template
+  """
+  filename: String
+
+  """
+  The repository the template belongs to
+  """
+  repository: Repository!
+}
+
+"""
+A threaded list of comments for a given pull request.
+"""
+type PullRequestThread implements Node {
+  """
+  A list of pull request comments associated with the thread.
+  """
+  comments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Skips the first _n_ elements in the list.
+    """
+    skip: Int
+  ): PullRequestReviewCommentConnection!
+
+  """
+  The side of the diff on which this thread was placed.
+  """
+  diffSide: DiffSide!
+
+  """
+  The Node ID of the PullRequestThread object
+  """
+  id: ID!
+
+  """
+  Whether or not the thread has been collapsed (resolved)
+  """
+  isCollapsed: Boolean!
+
+  """
+  Indicates whether this thread was outdated by newer changes.
+  """
+  isOutdated: Boolean!
+
+  """
+  Whether this thread has been resolved
+  """
+  isResolved: Boolean!
+
+  """
+  The line in the file to which this thread refers
+  """
+  line: Int
+
+  """
+  Identifies the file path of this thread.
+  """
+  path: String!
+
+  """
+  Identifies the pull request associated with this thread.
+  """
+  pullRequest: PullRequest!
+
+  """
+  Identifies the repository associated with this thread.
+  """
+  repository: Repository!
+
+  """
+  The user who resolved this thread
+  """
+  resolvedBy: User
+
+  """
+  The side of the diff that the first line of the thread starts on (multi-line only)
+  """
+  startDiffSide: DiffSide
+
+  """
+  The line of the first file diff in the thread.
+  """
+  startLine: Int
+
+  """
+  The level at which the comments in the corresponding thread are targeted; this can be a diff line or a file.
+  """
+  subjectType: PullRequestReviewThreadSubjectType!
+
+  """
+  Indicates whether the current viewer can reply to this thread.
+  """
+  viewerCanReply: Boolean!
+
+  """
+  Whether or not the viewer can resolve this thread
+  """
+  viewerCanResolve: Boolean!
+
+  """
+  Whether or not the viewer can unresolve this thread
+  """
+  viewerCanUnresolve: Boolean!
+}
+
+"""
+The connection type for PullRequestTimelineItem.
+"""
+type PullRequestTimelineConnection {
+  """
+  A list of edges.
+  """
+  edges: [PullRequestTimelineItemEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [PullRequestTimelineItem]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An item in a pull request timeline
+"""
+union PullRequestTimelineItem =
+    AssignedEvent
+  | BaseRefDeletedEvent
+  | BaseRefForcePushedEvent
+  | ClosedEvent
+  | Commit
+  | CommitCommentThread
+  | CrossReferencedEvent
+  | DemilestonedEvent
+  | DeployedEvent
+  | DeploymentEnvironmentChangedEvent
+  | HeadRefDeletedEvent
+  | HeadRefForcePushedEvent
+  | HeadRefRestoredEvent
+  | IssueComment
+  | LabeledEvent
+  | LockedEvent
+  | MergedEvent
+  | MilestonedEvent
+  | PullRequestReview
+  | PullRequestReviewComment
+  | PullRequestReviewThread
+  | ReferencedEvent
+  | RenamedTitleEvent
+  | ReopenedEvent
+  | ReviewDismissedEvent
+  | ReviewRequestRemovedEvent
+  | ReviewRequestedEvent
+  | SubscribedEvent
+  | UnassignedEvent
+  | UnlabeledEvent
+  | UnlockedEvent
+  | UnsubscribedEvent
+  | UserBlockedEvent
+
+"""
+An edge in a connection.
+"""
+type PullRequestTimelineItemEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PullRequestTimelineItem
+}
+
+"""
+An item in a pull request timeline
+"""
+union PullRequestTimelineItems =
+    AddedToMergeQueueEvent
+  | AddedToProjectEvent
+  | AssignedEvent
+  | AutoMergeDisabledEvent
+  | AutoMergeEnabledEvent
+  | AutoRebaseEnabledEvent
+  | AutoSquashEnabledEvent
+  | AutomaticBaseChangeFailedEvent
+  | AutomaticBaseChangeSucceededEvent
+  | BaseRefChangedEvent
+  | BaseRefDeletedEvent
+  | BaseRefForcePushedEvent
+  | ClosedEvent
+  | CommentDeletedEvent
+  | ConnectedEvent
+  | ConvertToDraftEvent
+  | ConvertedNoteToIssueEvent
+  | ConvertedToDiscussionEvent
+  | CrossReferencedEvent
+  | DemilestonedEvent
+  | DeployedEvent
+  | DeploymentEnvironmentChangedEvent
+  | DisconnectedEvent
+  | HeadRefDeletedEvent
+  | HeadRefForcePushedEvent
+  | HeadRefRestoredEvent
+  | IssueComment
+  | LabeledEvent
+  | LockedEvent
+  | MarkedAsDuplicateEvent
+  | MentionedEvent
+  | MergedEvent
+  | MilestonedEvent
+  | MovedColumnsInProjectEvent
+  | PinnedEvent
+  | PullRequestCommit
+  | PullRequestCommitCommentThread
+  | PullRequestReview
+  | PullRequestReviewThread
+  | PullRequestRevisionMarker
+  | ReadyForReviewEvent
+  | ReferencedEvent
+  | RemovedFromMergeQueueEvent
+  | RemovedFromProjectEvent
+  | RenamedTitleEvent
+  | ReopenedEvent
+  | ReviewDismissedEvent
+  | ReviewRequestRemovedEvent
+  | ReviewRequestedEvent
+  | SubscribedEvent
+  | TransferredEvent
+  | UnassignedEvent
+  | UnlabeledEvent
+  | UnlockedEvent
+  | UnmarkedAsDuplicateEvent
+  | UnpinnedEvent
+  | UnsubscribedEvent
+  | UserBlockedEvent
+
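+# Example (sketch): union members share no selectable fields besides
+# `__typename`, so timeline items are read through inline fragments. This
+# assumes the `timelineItems` connection on `PullRequest` and
+# `MergedEvent.createdAt`, both defined elsewhere in this schema; the node ID
+# is a placeholder.
+#
+#   query {
+#     node(id: "PULL_REQUEST_NODE_ID") {
+#       ... on PullRequest {
+#         timelineItems(first: 10) {
+#           nodes {
+#             __typename
+#             ... on MergedEvent { createdAt }
+#           }
+#         }
+#       }
+#     }
+#   }
+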
+"""
+The connection type for PullRequestTimelineItems.
+"""
+type PullRequestTimelineItemsConnection {
+  """
+  A list of edges.
+  """
+  edges: [PullRequestTimelineItemsEdge]
+
+  """
+  Identifies the count of items after applying `before` and `after` filters.
+  """
+  filteredCount: Int!
+
+  """
+  A list of nodes.
+  """
+  nodes: [PullRequestTimelineItems]
+
+  """
+  Identifies the count of items after applying `before`/`after` filters and `first`/`last`/`skip` slicing.
+  """
+  pageCount: Int!
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+
+  """
+  Identifies the date and time when the timeline was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+An edge in a connection.
+"""
+type PullRequestTimelineItemsEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PullRequestTimelineItems
+}
+
+"""
+The possible item types found in a timeline.
+"""
+enum PullRequestTimelineItemsItemType {
+  """
+  Represents an 'added_to_merge_queue' event on a given pull request.
+  """
+  ADDED_TO_MERGE_QUEUE_EVENT
+
+  """
+  Represents an 'added_to_project' event on a given issue or pull request.
+  """
+  ADDED_TO_PROJECT_EVENT
+
+  """
+  Represents an 'assigned' event on any assignable object.
+  """
+  ASSIGNED_EVENT
+
+  """
+  Represents an 'automatic_base_change_failed' event on a given pull request.
+  """
+  AUTOMATIC_BASE_CHANGE_FAILED_EVENT
+
+  """
+  Represents an 'automatic_base_change_succeeded' event on a given pull request.
+  """
+  AUTOMATIC_BASE_CHANGE_SUCCEEDED_EVENT
+
+  """
+  Represents an 'auto_merge_disabled' event on a given pull request.
+  """
+  AUTO_MERGE_DISABLED_EVENT
+
+  """
+  Represents an 'auto_merge_enabled' event on a given pull request.
+  """
+  AUTO_MERGE_ENABLED_EVENT
+
+  """
+  Represents an 'auto_rebase_enabled' event on a given pull request.
+  """
+  AUTO_REBASE_ENABLED_EVENT
+
+  """
+  Represents an 'auto_squash_enabled' event on a given pull request.
+  """
+  AUTO_SQUASH_ENABLED_EVENT
+
+  """
+  Represents a 'base_ref_changed' event on a given issue or pull request.
+  """
+  BASE_REF_CHANGED_EVENT
+
+  """
+  Represents a 'base_ref_deleted' event on a given pull request.
+  """
+  BASE_REF_DELETED_EVENT
+
+  """
+  Represents a 'base_ref_force_pushed' event on a given pull request.
+  """
+  BASE_REF_FORCE_PUSHED_EVENT
+
+  """
+  Represents a 'closed' event on any `Closable`.
+  """
+  CLOSED_EVENT
+
+  """
+  Represents a 'comment_deleted' event on a given issue or pull request.
+  """
+  COMMENT_DELETED_EVENT
+
+  """
+  Represents a 'connected' event on a given issue or pull request.
+  """
+  CONNECTED_EVENT
+
+  """
+  Represents a 'converted_note_to_issue' event on a given issue or pull request.
+  """
+  CONVERTED_NOTE_TO_ISSUE_EVENT
+
+  """
+  Represents a 'converted_to_discussion' event on a given issue.
+  """
+  CONVERTED_TO_DISCUSSION_EVENT
+
+  """
+  Represents a 'convert_to_draft' event on a given pull request.
+  """
+  CONVERT_TO_DRAFT_EVENT
+
+  """
+  Represents a mention made by one issue or pull request to another.
+  """
+  CROSS_REFERENCED_EVENT
+
+  """
+  Represents a 'demilestoned' event on a given issue or pull request.
+  """
+  DEMILESTONED_EVENT
+
+  """
+  Represents a 'deployed' event on a given pull request.
+  """
+  DEPLOYED_EVENT
+
+  """
+  Represents a 'deployment_environment_changed' event on a given pull request.
+  """
+  DEPLOYMENT_ENVIRONMENT_CHANGED_EVENT
+
+  """
+  Represents a 'disconnected' event on a given issue or pull request.
+  """
+  DISCONNECTED_EVENT
+
+  """
+  Represents a 'head_ref_deleted' event on a given pull request.
+  """
+  HEAD_REF_DELETED_EVENT
+
+  """
+  Represents a 'head_ref_force_pushed' event on a given pull request.
+  """
+  HEAD_REF_FORCE_PUSHED_EVENT
+
+  """
+  Represents a 'head_ref_restored' event on a given pull request.
+  """
+  HEAD_REF_RESTORED_EVENT
+
+  """
+  Represents a comment on an Issue.
+  """
+  ISSUE_COMMENT
+
+  """
+  Represents a 'labeled' event on a given issue or pull request.
+  """
+  LABELED_EVENT
+
+  """
+  Represents a 'locked' event on a given issue or pull request.
+  """
+  LOCKED_EVENT
+
+  """
+  Represents a 'marked_as_duplicate' event on a given issue or pull request.
+  """
+  MARKED_AS_DUPLICATE_EVENT
+
+  """
+  Represents a 'mentioned' event on a given issue or pull request.
+  """
+  MENTIONED_EVENT
+
+  """
+  Represents a 'merged' event on a given pull request.
+  """
+  MERGED_EVENT
+
+  """
+  Represents a 'milestoned' event on a given issue or pull request.
+  """
+  MILESTONED_EVENT
+
+  """
+  Represents a 'moved_columns_in_project' event on a given issue or pull request.
+  """
+  MOVED_COLUMNS_IN_PROJECT_EVENT
+
+  """
+  Represents a 'pinned' event on a given issue or pull request.
+  """
+  PINNED_EVENT
+
+  """
+  Represents a Git commit part of a pull request.
+  """
+  PULL_REQUEST_COMMIT
+
+  """
+  Represents a commit comment thread part of a pull request.
+  """
+  PULL_REQUEST_COMMIT_COMMENT_THREAD
+
+  """
+  A review object for a given pull request.
+  """
+  PULL_REQUEST_REVIEW
+
+  """
+  A threaded list of comments for a given pull request.
+  """
+  PULL_REQUEST_REVIEW_THREAD
+
+  """
+  Represents the latest point in the pull request timeline for which the viewer has seen the pull request's commits.
+  """
+  PULL_REQUEST_REVISION_MARKER
+
+  """
+  Represents a 'ready_for_review' event on a given pull request.
+  """
+  READY_FOR_REVIEW_EVENT
+
+  """
+  Represents a 'referenced' event on a given `ReferencedSubject`.
+  """
+  REFERENCED_EVENT
+
+  """
+  Represents a 'removed_from_merge_queue' event on a given pull request.
+  """
+  REMOVED_FROM_MERGE_QUEUE_EVENT
+
+  """
+  Represents a 'removed_from_project' event on a given issue or pull request.
+  """
+  REMOVED_FROM_PROJECT_EVENT
+
+  """
+  Represents a 'renamed' event on a given issue or pull request.
+  """
+  RENAMED_TITLE_EVENT
+
+  """
+  Represents a 'reopened' event on any `Closable`.
+  """
+  REOPENED_EVENT
+
+  """
+  Represents a 'review_dismissed' event on a given issue or pull request.
+  """
+  REVIEW_DISMISSED_EVENT
+
+  """
+  Represents a 'review_requested' event on a given pull request.
+  """
+  REVIEW_REQUESTED_EVENT
+
+  """
+  Represents a 'review_request_removed' event on a given pull request.
+  """
+  REVIEW_REQUEST_REMOVED_EVENT
+
+  """
+  Represents a 'subscribed' event on a given `Subscribable`.
+  """
+  SUBSCRIBED_EVENT
+
+  """
+  Represents a 'transferred' event on a given issue or pull request.
+  """
+  TRANSFERRED_EVENT
+
+  """
+  Represents an 'unassigned' event on any assignable object.
+  """
+  UNASSIGNED_EVENT
+
+  """
+  Represents an 'unlabeled' event on a given issue or pull request.
+  """
+  UNLABELED_EVENT
+
+  """
+  Represents an 'unlocked' event on a given issue or pull request.
+  """
+  UNLOCKED_EVENT
+
+  """
+  Represents an 'unmarked_as_duplicate' event on a given issue or pull request.
+  """
+  UNMARKED_AS_DUPLICATE_EVENT
+
+  """
+  Represents an 'unpinned' event on a given issue or pull request.
+  """
+  UNPINNED_EVENT
+
+  """
+  Represents an 'unsubscribed' event on a given `Subscribable`.
+  """
+  UNSUBSCRIBED_EVENT
+
+  """
+  Represents a 'user_blocked' event on a given user.
+  """
+  USER_BLOCKED_EVENT
+}
+
+"""
+The possible target states when updating a pull request.
+"""
+enum PullRequestUpdateState {
+  """
+  A pull request that has been closed without being merged.
+  """
+  CLOSED
+
+  """
+  A pull request that is still open.
+  """
+  OPEN
+}
+
+"""
+A Git push.
+"""
+type Push implements Node {
+  """
+  The Node ID of the Push object
+  """
+  id: ID!
+
+  """
+  The SHA after the push
+  """
+  nextSha: GitObjectID
+
+  """
+  The permalink for this push.
+  """
+  permalink: URI!
+
+  """
+  The SHA before the push
+  """
+  previousSha: GitObjectID
+
+  """
+  The actor who pushed
+  """
+  pusher: Actor!
+
+  """
+  The repository that was pushed to
+  """
+  repository: Repository!
+}
+
+"""
+A team, user, or app who has the ability to push to a protected branch.
+"""
+type PushAllowance implements Node {
+  """
+  The actor that can push.
+  """
+  actor: PushAllowanceActor
+
+  """
+  Identifies the branch protection rule associated with the allowed user, team, or app.
+  """
+  branchProtectionRule: BranchProtectionRule
+
+  """
+  The Node ID of the PushAllowance object
+  """
+  id: ID!
+}
+
+"""
+Types that can be an actor.
+"""
+union PushAllowanceActor = App | Team | User
+
+"""
+The connection type for PushAllowance.
+"""
+type PushAllowanceConnection {
+  """
+  A list of edges.
+  """
+  edges: [PushAllowanceEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [PushAllowance]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type PushAllowanceEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: PushAllowance
+}
+
+"""
+The query root of GitHub's GraphQL interface.
+"""
+type Query {
+  """
+  Look up a code of conduct by its key
+  """
+  codeOfConduct(
+    """
+    The code of conduct's key
+    """
+    key: String!
+  ): CodeOfConduct
+
+  """
+  Return a list of known codes of conduct
+  """
+  codesOfConduct: [CodeOfConduct]
+
+  """
+  Look up an enterprise by URL slug.
+  """
+  enterprise(
+    """
+    The enterprise invitation token.
+    """
+    invitationToken: String
+
+    """
+    The enterprise URL slug.
+    """
+    slug: String!
+  ): Enterprise
+
+  """
+  Look up a pending enterprise administrator invitation by invitee, enterprise and role.
+  """
+  enterpriseAdministratorInvitation(
+    """
+    The slug of the enterprise the user was invited to join.
+    """
+    enterpriseSlug: String!
+
+    """
+    The role for the business member invitation.
+    """
+    role: EnterpriseAdministratorRole!
+
+    """
+    The login of the user invited to join the business.
+    """
+    userLogin: String!
+  ): EnterpriseAdministratorInvitation
+
+  """
+  Look up a pending enterprise administrator invitation by invitation token.
+  """
+  enterpriseAdministratorInvitationByToken(
+    """
+    The invitation token sent with the invitation email.
+    """
+    invitationToken: String!
+  ): EnterpriseAdministratorInvitation
+
+  """
+  Look up an open source license by its key
+  """
+  license(
+    """
+    The license's downcased SPDX ID
+    """
+    key: String!
+  ): License
+
+  """
+  Return a list of known open source licenses
+  """
+  licenses: [License]!
+
+  """
+  Get alphabetically sorted list of Marketplace categories
+  """
+  marketplaceCategories(
+    """
+    Exclude categories with no listings.
+    """
+    excludeEmpty: Boolean
+
+    """
+    Returns top level categories only, excluding any subcategories.
+    """
+    excludeSubcategories: Boolean
+
+    """
+    Return only the specified categories.
+    """
+    includeCategories: [String!]
+  ): [MarketplaceCategory!]!
+
+  """
+  Look up a Marketplace category by its slug.
+  """
+  marketplaceCategory(
+    """
+    The URL slug of the category.
+    """
+    slug: String!
+
+    """
+    Also check topic aliases for the category slug
+    """
+    useTopicAliases: Boolean
+  ): MarketplaceCategory
+
+  """
+  Look up a single Marketplace listing
+  """
+  marketplaceListing(
+    """
+    Select the listing that matches this slug. It's the short name of the listing used in its URL.
+    """
+    slug: String!
+  ): MarketplaceListing
+
+  """
+  Look up Marketplace listings
+  """
+  marketplaceListings(
+    """
+    Select listings that can be administered by the specified user.
+    """
+    adminId: ID
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Select listings visible to the viewer even if they are not approved. If omitted or
+    false, only approved listings will be returned.
+    """
+    allStates: Boolean
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Select only listings with the given category.
+    """
+    categorySlug: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Select listings for products owned by the specified organization.
+    """
+    organizationId: ID
+
+    """
+    Select only listings where the primary category matches the given category slug.
+    """
+    primaryCategoryOnly: Boolean = false
+
+    """
+    Select the listings with these slugs, if they are visible to the viewer.
+    """
+    slugs: [String]
+
+    """
+    Also check topic aliases for the category slug
+    """
+    useTopicAliases: Boolean
+
+    """
+    Select listings to which the user has admin access. If omitted, listings visible to the
+    viewer are returned.
+    """
+    viewerCanAdmin: Boolean
+
+    """
+    Select only listings that offer a free trial.
+    """
+    withFreeTrialsOnly: Boolean = false
+  ): MarketplaceListingConnection!
+
+  """
+  Return information about the GitHub instance
+  """
+  meta: GitHubMetadata!
+
+  """
+  Fetches an object given its ID.
+  """
+  node(
+    """
+    ID of the object.
+    """
+    id: ID!
+  ): Node
+
+  """
+  Lookup nodes by a list of IDs.
+  """
+  nodes(
+    """
+    The list of node IDs.
+    """
+    ids: [ID!]!
+  ): [Node]!
+
+  """
+  Lookup an organization by login.
+  """
+  organization(
+    """
+    The organization's login.
+    """
+    login: String!
+  ): Organization
+
+  """
+  The client's rate limit information.
+  """
+  rateLimit(
+    """
+    If true, calculate the cost for the query without evaluating it.
+    """
+    dryRun: Boolean = false
+  ): RateLimit
+
+  """
+  Workaround for re-exposing the root query object. (Refer to
+  https://github.com/facebook/relay/issues/112 for more information.)
+  """
+  relay: Query!
+
+  """
+  Lookup a given repository by the owner and repository name.
+  """
+  repository(
+    """
+    Follow repository renames. If disabled, a repository referenced by its old name will return an error.
+    """
+    followRenames: Boolean = true
+
+    """
+    The name of the repository
+    """
+    name: String!
+
+    """
+    The login field of a user or organization
+    """
+    owner: String!
+  ): Repository
+
+  """
+  Lookup a repository owner (i.e. either a User or an Organization) by login.
+  """
+  repositoryOwner(
+    """
+    The username to lookup the owner by.
+    """
+    login: String!
+  ): RepositoryOwner
+
+  """
+  Lookup resource by a URL.
+  """
+  resource(
+    """
+    The URL.
+    """
+    url: URI!
+  ): UniformResourceLocatable
+
+  """
+  Perform a search across resources, returning a maximum of 1,000 results.
+  """
+  search(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    The search string to look for.
+    """
+    query: String!
+
+    """
+    The types of search items to search within.
+    """
+    type: SearchType!
+  ): SearchResultItemConnection!
+
+  """
+  GitHub Security Advisories
+  """
+  securityAdvisories(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    A list of classifications to filter advisories by.
+    """
+    classifications: [SecurityAdvisoryClassification!]
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Filter advisories by identifier, e.g. GHSA or CVE.
+    """
+    identifier: SecurityAdvisoryIdentifierFilter
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the returned advisories.
+    """
+    orderBy: SecurityAdvisoryOrder = {field: UPDATED_AT, direction: DESC}
+
+    """
+    Filter advisories to those published since a time in the past.
+    """
+    publishedSince: DateTime
+
+    """
+    Filter advisories to those updated since a time in the past.
+    """
+    updatedSince: DateTime
+  ): SecurityAdvisoryConnection!
+
+  """
+  Fetch a Security Advisory by its GHSA ID
+  """
+  securityAdvisory(
+    """
+    GitHub Security Advisory ID.
+    """
+    ghsaId: String!
+  ): SecurityAdvisory
+
+  """
+  Software Vulnerabilities documented by GitHub Security Advisories
+  """
+  securityVulnerabilities(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    A list of advisory classifications to filter vulnerabilities by.
+    """
+    classifications: [SecurityAdvisoryClassification!]
+
+    """
+    An ecosystem to filter vulnerabilities by.
+    """
+    ecosystem: SecurityAdvisoryEcosystem
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the returned vulnerabilities.
+    """
+    orderBy: SecurityVulnerabilityOrder = {field: UPDATED_AT, direction: DESC}
+
+    """
+    A package name to filter vulnerabilities by.
+    """
+    package: String
+
+    """
+    A list of severities to filter vulnerabilities by.
+    """
+    severities: [SecurityAdvisorySeverity!]
+  ): SecurityVulnerabilityConnection!
+
+  """
+  Users and organizations who can be sponsored via GitHub Sponsors.
+  """
+  sponsorables(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Optional filter for which dependencies should be checked for sponsorable
+    owners. Only sponsorable owners of dependencies in this ecosystem will be
+    included. Used when onlyDependencies = true.
+
+    **Upcoming Change on 2022-07-01 UTC**
+    **Description:** `dependencyEcosystem` will be removed. Use the ecosystem argument instead.
+    **Reason:** The type is switching from SecurityAdvisoryEcosystem to DependencyGraphEcosystem.
+    """
+    dependencyEcosystem: SecurityAdvisoryEcosystem
+
+    """
+    Optional filter for which dependencies should be checked for sponsorable
+    owners. Only sponsorable owners of dependencies in this ecosystem will be
+    included. Used when onlyDependencies = true.
+    """
+    ecosystem: DependencyGraphEcosystem
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Whether only sponsorables who own the viewer's dependencies will be
+    returned. Must be authenticated to use. To check the dependencies of an
+    organization instead, pass orgLoginForDependencies.
+    """
+    onlyDependencies: Boolean = false
+
+    """
+    Ordering options for users and organizations returned from the connection.
+    """
+    orderBy: SponsorableOrder = {field: LOGIN, direction: ASC}
+
+    """
+    Optional organization username whose dependencies should be checked.
+    Used when onlyDependencies = true. Omit to check your own dependencies. If
+    you are not an administrator of the organization, only dependencies from its
+    public repositories will be considered.
+    """
+    orgLoginForDependencies: String
+  ): SponsorableItemConnection!
+
+  """
+  Look up a topic by name.
+  """
+  topic(
+    """
+    The topic's name.
+    """
+    name: String!
+  ): Topic
+
+  """
+  Lookup a user by login.
+  """
+  user(
+    """
+    The user's login.
+    """
+    login: String!
+  ): User
+
+  """
+  The currently authenticated user.
+  """
+  viewer: User!
+}
+
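+# Example (sketch): a minimal query against this root, combining a direct
+# repository lookup with the authenticated viewer. The owner/name values are
+# placeholders; `id` comes from the `Node` interface.
+#
+#   query {
+#     repository(owner: "octocat", name: "hello-world") { id }
+#     viewer { id }
+#   }
+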
+"""
+Represents the client's rate limit.
+"""
+type RateLimit {
+  """
+  The point cost for the current query counting against the rate limit.
+  """
+  cost: Int!
+
+  """
+  The maximum number of points the client is permitted to consume in a 60 minute window.
+  """
+  limit: Int!
+
+  """
+  The maximum number of nodes this query may return
+  """
+  nodeCount: Int!
+
+  """
+  The number of points remaining in the current rate limit window.
+  """
+  remaining: Int!
+
+  """
+  The time at which the current rate limit window resets in UTC epoch seconds.
+  """
+  resetAt: DateTime!
+
+  """
+  The number of points used in the current rate limit window.
+  """
+  used: Int!
+}
+
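+# Example (sketch): inspecting the current rate limit. With `dryRun: true`,
+# the point cost is calculated without evaluating the query.
+#
+#   query {
+#     rateLimit(dryRun: true) {
+#       cost
+#       limit
+#       nodeCount
+#       remaining
+#       resetAt
+#       used
+#     }
+#   }
+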
+"""
+Represents a subject that can be reacted on.
+"""
+interface Reactable {
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the Reactable object
+  """
+  id: ID!
+
+  """
+  A list of reactions grouped by content left on the subject.
+  """
+  reactionGroups: [ReactionGroup!]
+
+  """
+  A list of Reactions left on the subject.
+  """
+  reactions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Allows filtering Reactions by emoji.
+    """
+    content: ReactionContent
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Allows specifying the order in which reactions are returned.
+    """
+    orderBy: ReactionOrder
+  ): ReactionConnection!
+
+  """
+  Can the user react to this subject
+  """
+  viewerCanReact: Boolean!
+}
+
+"""
+The connection type for User.
+"""
+type ReactingUserConnection {
+  """
+  A list of edges.
+  """
+  edges: [ReactingUserEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [User]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+Represents a user that's made a reaction.
+"""
+type ReactingUserEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
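+
+  """
+  The user who made the reaction.
+  """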
+  node: User!
+
+  """
+  The moment when the user made the reaction.
+  """
+  reactedAt: DateTime!
+}
+
+"""
+An emoji reaction to a particular piece of content.
+"""
+type Reaction implements Node {
+  """
+  Identifies the emoji reaction.
+  """
+  content: ReactionContent!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the Reaction object
+  """
+  id: ID!
+
+  """
+  The reactable piece of content
+  """
+  reactable: Reactable!
+
+  """
+  Identifies the user who created this reaction.
+  """
+  user: User
+}
+
+"""
+A list of reactions that have been left on the subject.
+"""
+type ReactionConnection {
+  """
+  A list of edges.
+  """
+  edges: [ReactionEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Reaction]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+
+  """
+  Whether or not the authenticated user has left a reaction on the subject.
+  """
+  viewerHasReacted: Boolean!
+}
+
+"""
+Emojis that can be attached to Issues, Pull Requests and Comments.
+"""
+enum ReactionContent {
+  """
+  Represents the `:confused:` emoji.
+  """
+  CONFUSED
+
+  """
+  Represents the `:eyes:` emoji.
+  """
+  EYES
+
+  """
+  Represents the `:heart:` emoji.
+  """
+  HEART
+
+  """
+  Represents the `:hooray:` emoji.
+  """
+  HOORAY
+
+  """
+  Represents the `:laugh:` emoji.
+  """
+  LAUGH
+
+  """
+  Represents the `:rocket:` emoji.
+  """
+  ROCKET
+
+  """
+  Represents the `:-1:` emoji.
+  """
+  THUMBS_DOWN
+
+  """
+  Represents the `:+1:` emoji.
+  """
+  THUMBS_UP
+}
+
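+# Example (sketch): these values double as the `content` filter on the
+# `reactions` connection (see `Reactable` above). The node ID is a
+# placeholder.
+#
+#   query {
+#     node(id: "REACTABLE_NODE_ID") {
+#       ... on Reactable {
+#         reactions(content: THUMBS_UP) { totalCount }
+#       }
+#     }
+#   }
+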
+"""
+An edge in a connection.
+"""
+type ReactionEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Reaction
+}
+
+"""
+A group of emoji reactions to a particular piece of content.
+"""
+type ReactionGroup {
+  """
+  Identifies the emoji reaction.
+  """
+  content: ReactionContent!
+
+  """
+  Identifies when the reaction was created.
+  """
+  createdAt: DateTime
+
+  """
+  Reactors to the reaction subject with the emotion represented by this reaction group.
+  """
+  reactors(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ReactorConnection!
+
+  """
+  The subject that was reacted to.
+  """
+  subject: Reactable!
+
+  """
+  Users who have reacted to the reaction subject with the emotion represented by this reaction group.
+  """
+  users(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ReactingUserConnection!
+    @deprecated(
+      reason: "Reactors can now be mannequins, bots, and organizations. Use the `reactors` field instead. Removal on 2021-10-01 UTC."
+    )
+
+  """
+  Whether or not the authenticated user has left a reaction on the subject.
+  """
+  viewerHasReacted: Boolean!
+}
+
+"""
+Ways in which lists of reactions can be ordered upon return.
+"""
+input ReactionOrder {
+  """
+  The direction in which to order reactions by the specified field.
+  """
+  direction: OrderDirection!
+
+  """
+  The field in which to order reactions by.
+  """
+  field: ReactionOrderField!
+}
+
+"""
+A list of fields that reactions can be ordered by.
+"""
+enum ReactionOrderField {
+  """
+  Allows ordering a list of reactions by when they were created.
+  """
+  CREATED_AT
+}
+
+"""
+Types that can be assigned to reactions.
+"""
+union Reactor = Bot | Mannequin | Organization | User
+
+"""
+The connection type for Reactor.
+"""
+type ReactorConnection {
+  """
+  A list of edges.
+  """
+  edges: [ReactorEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Reactor]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+Represents an author of a reaction.
+"""
+type ReactorEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The author of the reaction.
+  """
+  node: Reactor!
+
+  """
+  The moment when the user made the reaction.
+  """
+  reactedAt: DateTime!
+}
+
+"""
+Represents a 'ready_for_review' event on a given pull request.
+"""
+type ReadyForReviewEvent implements Node & UniformResourceLocatable {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the ReadyForReviewEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+
+  """
+  The HTTP path for this ready for review event.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for this ready for review event.
+  """
+  url: URI!
+}
+
+"""
+Represents a Git reference.
+"""
+type Ref implements Node {
+  """
+  A list of pull requests with this ref as the head ref.
+  """
+  associatedPullRequests(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    The base ref name to filter the pull requests by.
+    """
+    baseRefName: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    The head ref name to filter the pull requests by.
+    """
+    headRefName: String
+
+    """
+    A list of label names to filter the pull requests by.
+    """
+    labels: [String!]
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for pull requests returned from the connection.
+    """
+    orderBy: IssueOrder
+
+    """
+    A list of states to filter the pull requests by.
+    """
+    states: [PullRequestState!]
+  ): PullRequestConnection!
+
+  """
+  Branch protection rules for this ref
+  """
+  branchProtectionRule: BranchProtectionRule
+
+  """
+  Compares the current ref as a base ref to another head ref, if the comparison can be made.
+  """
+  compare(
+    """
+    The head ref to compare against.
+    """
+    headRef: String!
+  ): Comparison
+
+  """
+  The Node ID of the Ref object
+  """
+  id: ID!
+
+  """
+  The ref name.
+  """
+  name: String!
+
+  """
+  The ref's prefix, such as `refs/heads/` or `refs/tags/`.
+  """
+  prefix: String!
+
+  """
+  Branch protection rules that are viewable by non-admins
+  """
+  refUpdateRule: RefUpdateRule
+
+  """
+  The repository the ref belongs to.
+  """
+  repository: Repository!
+
+  """
+  The object the ref points to. Returns null when the object does not exist.
+  """
+  target: GitObject
+}
+
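+# Example (sketch): resolving a ref and comparing it against a head ref. This
+# assumes the `ref(qualifiedName:)` field on `Repository` and the
+# `aheadBy`/`behindBy` fields on `Comparison`, both defined elsewhere in this
+# schema; the owner, name, and branch values are placeholders.
+#
+#   query {
+#     repository(owner: "octocat", name: "hello-world") {
+#       ref(qualifiedName: "refs/heads/main") {
+#         name
+#         prefix
+#         compare(headRef: "feature-branch") { aheadBy behindBy }
+#       }
+#     }
+#   }
+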
+"""
+The connection type for Ref.
+"""
+type RefConnection {
+  """
+  A list of edges.
+  """
+  edges: [RefEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Ref]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type RefEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Ref
+}
+
+"""
+Parameters to be used for the ref_name condition
+"""
+type RefNameConditionTarget {
+  """
+  Array of ref names or patterns to exclude. The condition will not pass if any of these patterns match.
+  """
+  exclude: [String!]!
+
+  """
+  Array of ref names or patterns to include. One of these patterns must match
+  for the condition to pass. Also accepts `~DEFAULT_BRANCH` to include the
+  default branch or `~ALL` to include all branches.
+  """
+  include: [String!]!
+}
+
+"""
+Parameters to be used for the ref_name condition
+"""
+input RefNameConditionTargetInput {
+  """
+  Array of ref names or patterns to exclude. The condition will not pass if any of these patterns match.
+  """
+  exclude: [String!]!
+
+  """
+  Array of ref names or patterns to include. One of these patterns must match
+  for the condition to pass. Also accepts `~DEFAULT_BRANCH` to include the
+  default branch or `~ALL` to include all branches.
+  """
+  include: [String!]!
+}
+
+"""
+Ways in which lists of git refs can be ordered upon return.
+"""
+input RefOrder {
+  """
+  The direction in which to order refs by the specified field.
+  """
+  direction: OrderDirection!
+
+  """
+  The field in which to order refs by.
+  """
+  field: RefOrderField!
+}
+
+"""
+Properties by which ref connections can be ordered.
+"""
+enum RefOrderField {
+  """
+  Order refs by their alphanumeric name
+  """
+  ALPHABETICAL
+
+  """
+  Order refs by underlying commit date if the ref prefix is refs/tags/
+  """
+  TAG_COMMIT_DATE
+}
+
+"""
+A ref update
+"""
+input RefUpdate @preview(toggledBy: "update-refs-preview") {
+  """
+  The value this ref should be updated to.
+  """
+  afterOid: GitObjectID!
+
+  """
+  The value this ref needs to point to before the update.
+  """
+  beforeOid: GitObjectID
+
+  """
+  Force a non fast-forward update.
+  """
+  force: Boolean = false
+
+  """
+  The fully qualified name of the ref to be updated. For example, `refs/heads/branch-name`.
+  """
+  name: GitRefname!
+}
+
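+# Example (sketch): one entry of a `refUpdates` list for the preview
+# `updateRefs` mutation assumed to be defined elsewhere in this schema
+# (requires the `update-refs-preview` header); the IDs and OIDs below are
+# placeholders.
+#
+#   mutation {
+#     updateRefs(input: {
+#       repositoryId: "REPO_NODE_ID"
+#       refUpdates: [{
+#         name: "refs/heads/branch-name"
+#         beforeOid: "OLD_OID"
+#         afterOid: "NEW_OID"
+#         force: false
+#       }]
+#     }) { clientMutationId }
+#   }
+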
+"""
+Ref update rules for a viewer.
+"""
+type RefUpdateRule {
+  """
+  Can this branch be deleted.
+  """
+  allowsDeletions: Boolean!
+
+  """
+  Are force pushes allowed on this branch.
+  """
+  allowsForcePushes: Boolean!
+
+  """
+  Can matching branches be created.
+  """
+  blocksCreations: Boolean!
+
+  """
+  Identifies the protection rule pattern.
+  """
+  pattern: String!
+
+  """
+  Number of approving reviews required to update matching branches.
+  """
+  requiredApprovingReviewCount: Int
+
+  """
+  List of required status check contexts that must pass for commits to be accepted to matching branches.
+  """
+  requiredStatusCheckContexts: [String]
+
+  """
+  Are reviews from code owners required to update matching branches.
+  """
+  requiresCodeOwnerReviews: Boolean!
+
+  """
+  Are conversations required to be resolved before merging.
+  """
+  requiresConversationResolution: Boolean!
+
+  """
+  Are merge commits prohibited from being pushed to this branch.
+  """
+  requiresLinearHistory: Boolean!
+
+  """
+  Are commits required to be signed.
+  """
+  requiresSignatures: Boolean!
+
+  """
+  Is the viewer allowed to dismiss reviews.
+  """
+  viewerAllowedToDismissReviews: Boolean!
+
+  """
+  Can the viewer push to the branch
+  """
+  viewerCanPush: Boolean!
+}
+
+"""
+Represents a 'referenced' event on a given `ReferencedSubject`.
+"""
+type ReferencedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the commit associated with the 'referenced' event.
+  """
+  commit: Commit
+
+  """
+  Identifies the repository associated with the 'referenced' event.
+  """
+  commitRepository: Repository!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the ReferencedEvent object
+  """
+  id: ID!
+
+  """
+  Reference originated in a different repository.
+  """
+  isCrossRepository: Boolean!
+
+  """
+  Checks if the commit message itself references the subject. Can be false in the case of a commit comment reference.
+  """
+  isDirectReference: Boolean!
+
+  """
+  Object referenced by event.
+  """
+  subject: ReferencedSubject!
+}
+
+"""
+Any referenceable object
+"""
+union ReferencedSubject = Issue | PullRequest
+
+"""
+Autogenerated input type of RegenerateEnterpriseIdentityProviderRecoveryCodes
+"""
+input RegenerateEnterpriseIdentityProviderRecoveryCodesInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to regenerate the identity provider recovery codes.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+}
+
+"""
+Autogenerated return type of RegenerateEnterpriseIdentityProviderRecoveryCodes
+"""
+type RegenerateEnterpriseIdentityProviderRecoveryCodesPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The identity provider for the enterprise.
+  """
+  identityProvider: EnterpriseIdentityProvider
+}
+
+"""
+Autogenerated input type of RegenerateVerifiableDomainToken
+"""
+input RegenerateVerifiableDomainTokenInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the verifiable domain whose verification token should be regenerated.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["VerifiableDomain"])
+}
+
+"""
+Autogenerated return type of RegenerateVerifiableDomainToken
+"""
+type RegenerateVerifiableDomainTokenPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The verification token that was generated.
+  """
+  verificationToken: String
+}
+
+"""
+Autogenerated input type of RejectDeployments
+"""
+input RejectDeploymentsInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Optional comment for rejecting deployments
+  """
+  comment: String = ""
+
+  """
+  The ids of the environments for which to reject deployments.
+  """
+  environmentIds: [ID!]!
+
+  """
+  The node ID of the workflow run containing the pending deployments.
+  """
+  workflowRunId: ID! @possibleTypes(concreteTypes: ["WorkflowRun"])
+}
+
+"""
+Autogenerated return type of RejectDeployments
+"""
+type RejectDeploymentsPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The affected deployments.
+  """
+  deployments: [Deployment!]
+}
+
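+# Example (sketch): invoking the matching mutation, assumed to be exposed on
+# the `Mutation` root elsewhere in this schema; the node IDs are
+# placeholders.
+#
+#   mutation {
+#     rejectDeployments(input: {
+#       workflowRunId: "WORKFLOW_RUN_NODE_ID"
+#       environmentIds: ["ENVIRONMENT_NODE_ID"]
+#       comment: "Blocked pending review"
+#     }) { clientMutationId }
+#   }
+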
+"""
+A release contains the content for a release.
+"""
+type Release implements Node & Reactable & UniformResourceLocatable {
+  """
+  The author of the release
+  """
+  author: User
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The description of the release.
+  """
+  description: String
+
+  """
+  The description of this release rendered to HTML.
+  """
+  descriptionHTML: HTML
+
+  """
+  The Node ID of the Release object
+  """
+  id: ID!
+
+  """
+  Whether or not the release is a draft
+  """
+  isDraft: Boolean!
+
+  """
+  Whether or not the release is the latest release
+  """
+  isLatest: Boolean!
+
+  """
+  Whether or not the release is a prerelease
+  """
+  isPrerelease: Boolean!
+
+  """
+  A list of users mentioned in the release description
+  """
+  mentions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserConnection
+
+  """
+  The title of the release.
+  """
+  name: String
+
+  """
+  Identifies the date and time when the release was published.
+  """
+  publishedAt: DateTime
+
+  """
+  A list of reactions grouped by content left on the subject.
+  """
+  reactionGroups: [ReactionGroup!]
+
+  """
+  A list of Reactions left on the subject.
+  """
+  reactions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Allows filtering Reactions by emoji.
+    """
+    content: ReactionContent
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Allows specifying the order in which reactions are returned.
+    """
+    orderBy: ReactionOrder
+  ): ReactionConnection!
+
+  """
+  List of release assets which are dependent on this release.
+  """
+  releaseAssets(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    A name to filter the assets by.
+    """
+    name: String
+  ): ReleaseAssetConnection!
+
+  """
+  The repository that the release belongs to.
+  """
+  repository: Repository!
+
+  """
+  The HTTP path for this issue
+  """
+  resourcePath: URI!
+
+  """
+  A description of the release, rendered to HTML without any links in it.
+  """
+  shortDescriptionHTML(
+    """
+    How many characters to return.
+    """
+    limit: Int = 200
+  ): HTML
+
+  """
+  The Git tag the release points to
+  """
+  tag: Ref
+
+  """
+  The tag commit for this release.
+  """
+  tagCommit: Commit
+
+  """
+  The name of the release's Git tag
+  """
+  tagName: String!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this issue
+  """
+  url: URI!
+
+  """
+  Can the user react to this subject
+  """
+  viewerCanReact: Boolean!
+}
+
+"""
+A release asset contains the content for a release asset.
+"""
+type ReleaseAsset implements Node {
+  """
+  The asset's content-type
+  """
+  contentType: String!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The number of times this asset was downloaded
+  """
+  downloadCount: Int!
+
+  """
+  Identifies the URL where you can download the release asset via the browser.
+  """
+  downloadUrl: URI!
+
+  """
+  The Node ID of the ReleaseAsset object
+  """
+  id: ID!
+
+  """
+  Identifies the title of the release asset.
+  """
+  name: String!
+
+  """
+  Release that the asset is associated with
+  """
+  release: Release
+
+  """
+  The size (in bytes) of the asset
+  """
+  size: Int!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The user that performed the upload
+  """
+  uploadedBy: User!
+
+  """
+  Identifies the URL of the release asset.
+  """
+  url: URI!
+}
+
+"""
+The connection type for ReleaseAsset.
+"""
+type ReleaseAssetConnection {
+  """
+  A list of edges.
+  """
+  edges: [ReleaseAssetEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ReleaseAsset]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ReleaseAssetEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ReleaseAsset
+}
+
+"""
+The connection type for Release.
+"""
+type ReleaseConnection {
+  """
+  A list of edges.
+  """
+  edges: [ReleaseEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Release]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ReleaseEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Release
+}
+
+"""
+Ways in which lists of releases can be ordered upon return.
+"""
+input ReleaseOrder {
+  """
+  The direction in which to order releases by the specified field.
+  """
+  direction: OrderDirection!
+
+  """
+  The field by which to order releases.
+  """
+  field: ReleaseOrderField!
+}
+
+"""
+Properties by which release connections can be ordered.
+"""
+enum ReleaseOrderField {
+  """
+  Order releases by creation time
+  """
+  CREATED_AT
+
+  """
+  Order releases alphabetically by name
+  """
+  NAME
+}
+
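+# Illustrative query (not part of the schema): ordering releases with the
+# ReleaseOrder input above and walking the connection through edges and
+# cursors. The `repository` and `releases` fields are assumed to be defined
+# elsewhere in this schema.
+#
+# query {
+#   repository(owner: "octocat", name: "hello-world") {
+#     releases(first: 5, orderBy: {field: CREATED_AT, direction: DESC}) {
+#       totalCount
+#       edges {
+#         cursor
+#         node { tagName publishedAt }
+#       }
+#     }
+#   }
+# }
+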
+"""
+Autogenerated input type of RemoveAssigneesFromAssignable
+"""
+input RemoveAssigneesFromAssignableInput {
+  """
+  The id of the assignable object to remove assignees from.
+  """
+  assignableId: ID! @possibleTypes(concreteTypes: ["Issue", "PullRequest"], abstractType: "Assignable")
+
+  """
+  The ids of users to remove as assignees.
+  """
+  assigneeIds: [ID!]! @possibleTypes(concreteTypes: ["User"])
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated return type of RemoveAssigneesFromAssignable
+"""
+type RemoveAssigneesFromAssignablePayload {
+  """
+  The item that was unassigned.
+  """
+  assignable: Assignable
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
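+# Illustrative mutation (not part of the schema): the input/payload pair above
+# follows the Relay convention, so the root field is assumed to be
+# `Mutation.removeAssigneesFromAssignable`; the IDs are placeholders.
+#
+# mutation {
+#   removeAssigneesFromAssignable(
+#     input: {assignableId: "I_abc123", assigneeIds: ["U_def456"]}
+#   ) {
+#     assignable {
+#       ... on Issue { title }
+#       ... on PullRequest { title }
+#     }
+#   }
+# }
+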
+"""
+Autogenerated input type of RemoveEnterpriseAdmin
+"""
+input RemoveEnterpriseAdminInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Enterprise ID from which to remove the administrator.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The login of the user to remove as an administrator.
+  """
+  login: String!
+}
+
+"""
+Autogenerated return type of RemoveEnterpriseAdmin
+"""
+type RemoveEnterpriseAdminPayload {
+  """
+  The user who was removed as an administrator.
+  """
+  admin: User
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated enterprise.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of removing an administrator.
+  """
+  message: String
+
+  """
+  The viewer performing the mutation.
+  """
+  viewer: User
+}
+
+"""
+Autogenerated input type of RemoveEnterpriseIdentityProvider
+"""
+input RemoveEnterpriseIdentityProviderInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise from which to remove the identity provider.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+}
+
+"""
+Autogenerated return type of RemoveEnterpriseIdentityProvider
+"""
+type RemoveEnterpriseIdentityProviderPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The identity provider that was removed from the enterprise.
+  """
+  identityProvider: EnterpriseIdentityProvider
+}
+
+"""
+Autogenerated input type of RemoveEnterpriseMember
+"""
+input RemoveEnterpriseMemberInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise from which the user should be removed.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The ID of the user to remove from the enterprise.
+  """
+  userId: ID! @possibleTypes(concreteTypes: ["User"])
+}
+
+"""
+Autogenerated return type of RemoveEnterpriseMember
+"""
+type RemoveEnterpriseMemberPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated enterprise.
+  """
+  enterprise: Enterprise
+
+  """
+  The user that was removed from the enterprise.
+  """
+  user: User
+
+  """
+  The viewer performing the mutation.
+  """
+  viewer: User
+}
+
+"""
+Autogenerated input type of RemoveEnterpriseOrganization
+"""
+input RemoveEnterpriseOrganizationInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise from which the organization should be removed.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The ID of the organization to remove from the enterprise.
+  """
+  organizationId: ID! @possibleTypes(concreteTypes: ["Organization"])
+}
+
+"""
+Autogenerated return type of RemoveEnterpriseOrganization
+"""
+type RemoveEnterpriseOrganizationPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated enterprise.
+  """
+  enterprise: Enterprise
+
+  """
+  The organization that was removed from the enterprise.
+  """
+  organization: Organization
+
+  """
+  The viewer performing the mutation.
+  """
+  viewer: User
+}
+
+"""
+Autogenerated input type of RemoveEnterpriseSupportEntitlement
+"""
+input RemoveEnterpriseSupportEntitlementInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the Enterprise which the admin belongs to.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The login of a member who will lose the support entitlement.
+  """
+  login: String!
+}
+
+"""
+Autogenerated return type of RemoveEnterpriseSupportEntitlement
+"""
+type RemoveEnterpriseSupportEntitlementPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A message confirming the result of removing the support entitlement.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of RemoveLabelsFromLabelable
+"""
+input RemoveLabelsFromLabelableInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ids of labels to remove.
+  """
+  labelIds: [ID!]! @possibleTypes(concreteTypes: ["Label"])
+
+  """
+  The id of the Labelable to remove labels from.
+  """
+  labelableId: ID! @possibleTypes(concreteTypes: ["Discussion", "Issue", "PullRequest"], abstractType: "Labelable")
+}
+
+"""
+Autogenerated return type of RemoveLabelsFromLabelable
+"""
+type RemoveLabelsFromLabelablePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Labelable the labels were removed from.
+  """
+  labelable: Labelable
+}
+
+"""
+Autogenerated input type of RemoveOutsideCollaborator
+"""
+input RemoveOutsideCollaboratorInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the organization to remove the outside collaborator from.
+  """
+  organizationId: ID! @possibleTypes(concreteTypes: ["Organization"])
+
+  """
+  The ID of the outside collaborator to remove.
+  """
+  userId: ID! @possibleTypes(concreteTypes: ["User"])
+}
+
+"""
+Autogenerated return type of RemoveOutsideCollaborator
+"""
+type RemoveOutsideCollaboratorPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The user that was removed as an outside collaborator.
+  """
+  removedUser: User
+}
+
+"""
+Autogenerated input type of RemoveReaction
+"""
+input RemoveReactionInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The name of the emoji reaction to remove.
+  """
+  content: ReactionContent!
+
+  """
+  The Node ID of the subject to modify.
+  """
+  subjectId: ID!
+    @possibleTypes(
+      concreteTypes: [
+        "CommitComment"
+        "Discussion"
+        "DiscussionComment"
+        "Issue"
+        "IssueComment"
+        "PullRequest"
+        "PullRequestReview"
+        "PullRequestReviewComment"
+        "Release"
+        "TeamDiscussion"
+        "TeamDiscussionComment"
+      ]
+      abstractType: "Reactable"
+    )
+}
+
+"""
+Autogenerated return type of RemoveReaction
+"""
+type RemoveReactionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The reaction object.
+  """
+  reaction: Reaction
+
+  """
+  The reaction groups for the subject.
+  """
+  reactionGroups: [ReactionGroup!]
+
+  """
+  The reactable subject.
+  """
+  subject: Reactable
+}
+
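+# Illustrative mutation (not part of the schema): removing a reaction and
+# reading back the updated reaction groups. The root field name
+# `removeReaction` is assumed from the input/payload naming above, and
+# `THUMBS_UP` is assumed to be a ReactionContent value defined elsewhere in
+# this schema; the subject ID is a placeholder.
+#
+# mutation {
+#   removeReaction(input: {subjectId: "I_abc123", content: THUMBS_UP}) {
+#     subject {
+#       ... on Issue { title }
+#     }
+#     reactionGroups { content }
+#   }
+# }
+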
+"""
+Autogenerated input type of RemoveStar
+"""
+input RemoveStarInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Starrable ID to unstar.
+  """
+  starrableId: ID! @possibleTypes(concreteTypes: ["Gist", "Repository", "Topic"], abstractType: "Starrable")
+}
+
+"""
+Autogenerated return type of RemoveStar
+"""
+type RemoveStarPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The starrable.
+  """
+  starrable: Starrable
+}
+
+"""
+Autogenerated input type of RemoveUpvote
+"""
+input RemoveUpvoteInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the discussion or comment to remove the upvote from.
+  """
+  subjectId: ID! @possibleTypes(concreteTypes: ["Discussion", "DiscussionComment"], abstractType: "Votable")
+}
+
+"""
+Autogenerated return type of RemoveUpvote
+"""
+type RemoveUpvotePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The votable subject.
+  """
+  subject: Votable
+}
+
+"""
+Represents a 'removed_from_merge_queue' event on a given pull request.
+"""
+type RemovedFromMergeQueueEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the before commit SHA for the 'removed_from_merge_queue' event.
+  """
+  beforeCommit: Commit
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The user who removed this Pull Request from the merge queue
+  """
+  enqueuer: User
+
+  """
+  The Node ID of the RemovedFromMergeQueueEvent object
+  """
+  id: ID!
+
+  """
+  The merge queue from which this pull request was removed.
+  """
+  mergeQueue: MergeQueue
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest
+
+  """
+  The reason this pull request was removed from the queue.
+  """
+  reason: String
+}
+
+"""
+Represents a 'removed_from_project' event on a given issue or pull request.
+"""
+type RemovedFromProjectEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the RemovedFromProjectEvent object
+  """
+  id: ID!
+
+  """
+  Project referenced by event.
+  """
+  project: Project @preview(toggledBy: "starfox-preview")
+
+  """
+  Column name referenced by this project event.
+  """
+  projectColumnName: String! @preview(toggledBy: "starfox-preview")
+}
+
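+# Note (illustrative): fields marked @preview(toggledBy: "starfox-preview")
+# above are only served to clients that opt into that schema preview. On
+# GitHub's API this is done with a preview media type in the Accept header,
+# e.g. `Accept: application/vnd.github.starfox-preview+json`; treat the exact
+# header value as an assumption here.
+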
+"""
+Represents a 'renamed' event on a given issue or pull request
+"""
+type RenamedTitleEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the current title of the issue or pull request.
+  """
+  currentTitle: String!
+
+  """
+  The Node ID of the RenamedTitleEvent object
+  """
+  id: ID!
+
+  """
+  Identifies the previous title of the issue or pull request.
+  """
+  previousTitle: String!
+
+  """
+  Subject that was renamed.
+  """
+  subject: RenamedTitleSubject!
+}
+
+"""
+An object which has a renamable title
+"""
+union RenamedTitleSubject = Issue | PullRequest
+
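+# Illustrative selection (not part of the schema): resolving the union-typed
+# `subject` of a RenamedTitleEvent with inline fragments, since a union carries
+# no fields of its own.
+#
+# fragment RenamedTitleFields on RenamedTitleEvent {
+#   previousTitle
+#   currentTitle
+#   subject {
+#     ... on Issue { number }
+#     ... on PullRequest { number }
+#   }
+# }
+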
+"""
+Autogenerated input type of ReopenDiscussion
+"""
+input ReopenDiscussionInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the discussion to be reopened.
+  """
+  discussionId: ID! @possibleTypes(concreteTypes: ["Discussion"])
+}
+
+"""
+Autogenerated return type of ReopenDiscussion
+"""
+type ReopenDiscussionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The discussion that was reopened.
+  """
+  discussion: Discussion
+}
+
+"""
+Autogenerated input type of ReopenIssue
+"""
+input ReopenIssueInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the issue to be reopened.
+  """
+  issueId: ID! @possibleTypes(concreteTypes: ["Issue"])
+}
+
+"""
+Autogenerated return type of ReopenIssue
+"""
+type ReopenIssuePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The issue that was reopened.
+  """
+  issue: Issue
+}
+
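+# Illustrative mutation (not part of the schema): reopening a closed issue.
+# The root field name `reopenIssue` is assumed from the input/payload naming
+# above; the issue ID is a placeholder.
+#
+# mutation {
+#   reopenIssue(input: {issueId: "I_abc123"}) {
+#     issue { number state }
+#   }
+# }
+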
+"""
+Autogenerated input type of ReopenPullRequest
+"""
+input ReopenPullRequestInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the pull request to be reopened.
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+}
+
+"""
+Autogenerated return type of ReopenPullRequest
+"""
+type ReopenPullRequestPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The pull request that was reopened.
+  """
+  pullRequest: PullRequest
+}
+
+"""
+Represents a 'reopened' event on any `Closable`.
+"""
+type ReopenedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Object that was reopened.
+  """
+  closable: Closable!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the ReopenedEvent object
+  """
+  id: ID!
+
+  """
+  The reason the issue state was changed to open.
+  """
+  stateReason: IssueStateReason
+}
+
+"""
+Audit log entry for a repo.access event.
+"""
+type RepoAccessAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoAccessAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+
+  """
+  The visibility of the repository
+  """
+  visibility: RepoAccessAuditEntryVisibility
+}
+
+"""
+The privacy of a repository
+"""
+enum RepoAccessAuditEntryVisibility {
+  """
+  The repository is visible only to users in the same business.
+  """
+  INTERNAL
+
+  """
+  The repository is visible only to those with explicit access.
+  """
+  PRIVATE
+
+  """
+  The repository is visible to everyone.
+  """
+  PUBLIC
+}
+
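+# Illustrative query (not part of the schema): audit entry types such as
+# RepoAccessAuditEntry are typically reached through an organization's audit
+# log connection; the `organization` and `auditLog` fields are assumed to be
+# defined elsewhere in this schema.
+#
+# query {
+#   organization(login: "octo-org") {
+#     auditLog(first: 5) {
+#       nodes {
+#         ... on RepoAccessAuditEntry {
+#           action
+#           actorLogin
+#           repositoryName
+#           visibility
+#         }
+#       }
+#     }
+#   }
+# }
+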
+"""
+Audit log entry for a repo.add_member event.
+"""
+type RepoAddMemberAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoAddMemberAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+
+  """
+  The visibility of the repository
+  """
+  visibility: RepoAddMemberAuditEntryVisibility
+}
+
+"""
+The privacy of a repository
+"""
+enum RepoAddMemberAuditEntryVisibility {
+  """
+  The repository is visible only to users in the same business.
+  """
+  INTERNAL
+
+  """
+  The repository is visible only to those with explicit access.
+  """
+  PRIVATE
+
+  """
+  The repository is visible to everyone.
+  """
+  PUBLIC
+}
+
+"""
+Audit log entry for a repo.add_topic event.
+"""
+type RepoAddTopicAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData & TopicAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoAddTopicAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The name of the topic added to the repository
+  """
+  topic: Topic
+
+  """
+  The name of the topic added to the repository
+  """
+  topicName: String
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a repo.archived event.
+"""
+type RepoArchivedAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoArchivedAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+
+  """
+  The visibility of the repository
+  """
+  visibility: RepoArchivedAuditEntryVisibility
+}
+
+"""
+The privacy of a repository
+"""
+enum RepoArchivedAuditEntryVisibility {
+  """
+  The repository is visible only to users in the same business.
+  """
+  INTERNAL
+
+  """
+  The repository is visible only to those with explicit access.
+  """
+  PRIVATE
+
+  """
+  The repository is visible to everyone.
+  """
+  PUBLIC
+}
+
+"""
+Audit log entry for a repo.change_merge_setting event.
+"""
+type RepoChangeMergeSettingAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoChangeMergeSettingAuditEntry object
+  """
+  id: ID!
+
+  """
+  Whether the change was to enable (true) or disable (false) the merge type
+  """
+  isEnabled: Boolean
+
+  """
+  The merge method affected by the change
+  """
+  mergeType: RepoChangeMergeSettingAuditEntryMergeType
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+The merge options available for pull requests to this repository.
+"""
+enum RepoChangeMergeSettingAuditEntryMergeType {
+  """
+  The pull request is added to the base branch in a merge commit.
+  """
+  MERGE
+
+  """
+  Commits from the pull request are added onto the base branch individually without a merge commit.
+  """
+  REBASE
+
+  """
+  The pull request's commits are squashed into a single commit before they are merged to the base branch.
+  """
+  SQUASH
+}
+
+"""
+Audit log entry for a repo.config.disable_anonymous_git_access event.
+"""
+type RepoConfigDisableAnonymousGitAccessAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoConfigDisableAnonymousGitAccessAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a repo.config.disable_collaborators_only event.
+"""
+type RepoConfigDisableCollaboratorsOnlyAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoConfigDisableCollaboratorsOnlyAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a repo.config.disable_contributors_only event.
+"""
+type RepoConfigDisableContributorsOnlyAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoConfigDisableContributorsOnlyAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a repo.config.disable_sockpuppet_disallowed event.
+"""
+type RepoConfigDisableSockpuppetDisallowedAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoConfigDisableSockpuppetDisallowedAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a repo.config.enable_anonymous_git_access event.
+"""
+type RepoConfigEnableAnonymousGitAccessAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoConfigEnableAnonymousGitAccessAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a repo.config.enable_collaborators_only event.
+"""
+type RepoConfigEnableCollaboratorsOnlyAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoConfigEnableCollaboratorsOnlyAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a repo.config.enable_contributors_only event.
+"""
+type RepoConfigEnableContributorsOnlyAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoConfigEnableContributorsOnlyAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a repo.config.enable_sockpuppet_disallowed event.
+"""
+type RepoConfigEnableSockpuppetDisallowedAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoConfigEnableSockpuppetDisallowedAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a repo.config.lock_anonymous_git_access event.
+"""
+type RepoConfigLockAnonymousGitAccessAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoConfigLockAnonymousGitAccessAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a repo.config.unlock_anonymous_git_access event.
+"""
+type RepoConfigUnlockAnonymousGitAccessAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoConfigUnlockAnonymousGitAccessAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a repo.create event.
+"""
+type RepoCreateAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The name of the parent repository for this forked repository.
+  """
+  forkParentName: String
+
+  """
+  The name of the root repository for this network.
+  """
+  forkSourceName: String
+
+  """
+  The Node ID of the RepoCreateAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+
+  """
+  The visibility of the repository
+  """
+  visibility: RepoCreateAuditEntryVisibility
+}
+
+"""
+The privacy of a repository
+"""
+enum RepoCreateAuditEntryVisibility {
+  """
+  The repository is visible only to users in the same business.
+  """
+  INTERNAL
+
+  """
+  The repository is visible only to those with explicit access.
+  """
+  PRIVATE
+
+  """
+  The repository is visible to everyone.
+  """
+  PUBLIC
+}
+
+"""
+Audit log entry for a repo.destroy event.
+"""
+type RepoDestroyAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoDestroyAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+
+  """
+  The visibility of the repository
+  """
+  visibility: RepoDestroyAuditEntryVisibility
+}
+
+"""
+The privacy of a repository
+"""
+enum RepoDestroyAuditEntryVisibility {
+  """
+  The repository is visible only to users in the same business.
+  """
+  INTERNAL
+
+  """
+  The repository is visible only to those with explicit access.
+  """
+  PRIVATE
+
+  """
+  The repository is visible to everyone.
+  """
+  PUBLIC
+}
+
+"""
+Audit log entry for a repo.remove_member event.
+"""
+type RepoRemoveMemberAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoRemoveMemberAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+
+  """
+  The visibility of the repository
+  """
+  visibility: RepoRemoveMemberAuditEntryVisibility
+}
+
+"""
+The privacy of a repository
+"""
+enum RepoRemoveMemberAuditEntryVisibility {
+  """
+  The repository is visible only to users in the same business.
+  """
+  INTERNAL
+
+  """
+  The repository is visible only to those with explicit access.
+  """
+  PRIVATE
+
+  """
+  The repository is visible to everyone.
+  """
+  PUBLIC
+}
+
+"""
+Audit log entry for a repo.remove_topic event.
+"""
+type RepoRemoveTopicAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData & TopicAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the RepoRemoveTopicAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The topic removed from the repository
+  """
+  topic: Topic
+
+  """
+  The name of the topic removed from the repository
+  """
+  topicName: String
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+The reasons a piece of content can be reported or minimized.
+"""
+enum ReportedContentClassifiers {
+  """
+  An abusive or harassing piece of content
+  """
+  ABUSE
+
+  """
+  A duplicated piece of content
+  """
+  DUPLICATE
+
+  """
+  An irrelevant piece of content
+  """
+  OFF_TOPIC
+
+  """
+  An outdated piece of content
+  """
+  OUTDATED
+
+  """
+  The content has been resolved
+  """
+  RESOLVED
+
+  """
+  A spammy piece of content
+  """
+  SPAM
+}
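+
+# Example mutation (editor's sketch, not part of the upstream schema): these
+# classifiers are the values accepted when minimizing a comment. Assumes the
+# `minimizeComment` mutation defined elsewhere in this schema; the subject ID
+# is a placeholder.
+#
+#   mutation {
+#     minimizeComment(input: {subjectId: "IC_exampleCommentId", classifier: SPAM}) {
+#       minimizedComment {
+#         isMinimized
+#         minimizedReason
+#       }
+#     }
+#   }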
+
+"""
+A repository contains the content for a project.
+"""
+type Repository implements Node & PackageOwner & ProjectOwner & ProjectV2Recent & RepositoryInfo & Starrable & Subscribable & UniformResourceLocatable {
+  """
+  Whether or not a pull request head branch that is behind its base branch can
+  always be updated even if it is not required to be up to date before merging.
+  """
+  allowUpdateBranch: Boolean!
+
+  """
+  Identifies the date and time when the repository was archived.
+  """
+  archivedAt: DateTime
+
+  """
+  A list of users that can be assigned to issues in this repository.
+  """
+  assignableUsers(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filters users with query on user name and login.
+    """
+    query: String
+  ): UserConnection!
+
+  """
+  Whether or not Auto-merge can be enabled on pull requests in this repository.
+  """
+  autoMergeAllowed: Boolean!
+
+  """
+  A list of branch protection rules for this repository.
+  """
+  branchProtectionRules(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): BranchProtectionRuleConnection!
+
+  """
+  Returns the code of conduct for this repository
+  """
+  codeOfConduct: CodeOfConduct
+
+  """
+  Information extracted from the repository's `CODEOWNERS` file.
+  """
+  codeowners(
+    """
+    The ref name used to return the associated `CODEOWNERS` file.
+    """
+    refName: String
+  ): RepositoryCodeowners
+
+  """
+  A list of collaborators associated with the repository.
+  """
+  collaborators(
+    """
+    Collaborators affiliation level with a repository.
+    """
+    affiliation: CollaboratorAffiliation
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    The login of one specific collaborator.
+    """
+    login: String
+
+    """
+    Filters users with query on user name and login
+    """
+    query: String
+  ): RepositoryCollaboratorConnection
+
+  """
+  A list of commit comments associated with the repository.
+  """
+  commitComments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): CommitCommentConnection!
+
+  """
+  Returns a list of contact links associated with the repository
+  """
+  contactLinks: [RepositoryContactLink!]
+
+  """
+  Returns the contributing guidelines for this repository.
+  """
+  contributingGuidelines: ContributingGuidelines
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Ref associated with the repository's default branch.
+  """
+  defaultBranchRef: Ref
+
+  """
+  Whether or not branches are automatically deleted when merged in this repository.
+  """
+  deleteBranchOnMerge: Boolean!
+
+  """
+  A list of dependency manifests contained in the repository
+  """
+  dependencyGraphManifests(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Cursor to paginate dependencies
+    """
+    dependenciesAfter: String
+
+    """
+    Number of dependencies to fetch
+    """
+    dependenciesFirst: Int
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Flag to scope to only manifests with dependencies
+    """
+    withDependencies: Boolean
+  ): DependencyGraphManifestConnection @preview(toggledBy: "hawkgirl-preview")
+
+  """
+  A list of deploy keys that are on this repository.
+  """
+  deployKeys(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): DeployKeyConnection!
+
+  """
+  Deployments associated with the repository
+  """
+  deployments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Environments to list deployments for
+    """
+    environments: [String!]
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for deployments returned from the connection.
+    """
+    orderBy: DeploymentOrder = {field: CREATED_AT, direction: ASC}
+  ): DeploymentConnection!
+
+  """
+  The description of the repository.
+  """
+  description: String
+
+  """
+  The description of the repository rendered to HTML.
+  """
+  descriptionHTML: HTML!
+
+  """
+  Returns a single discussion from the current repository by number.
+  """
+  discussion(
+    """
+    The number for the discussion to be returned.
+    """
+    number: Int!
+  ): Discussion
+
+  """
+  A list of discussion categories that are available in the repository.
+  """
+  discussionCategories(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Filter by categories that are assignable by the viewer.
+    """
+    filterByAssignable: Boolean = false
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): DiscussionCategoryConnection!
+
+  """
+  A discussion category by slug.
+  """
+  discussionCategory(
+    """
+    The slug of the discussion category to be returned.
+    """
+    slug: String!
+  ): DiscussionCategory
+
+  """
+  A list of discussions that have been opened in the repository.
+  """
+  discussions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Only show answered or unanswered discussions
+    """
+    answered: Boolean = null
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Only include discussions that belong to the category with this ID.
+    """
+    categoryId: ID = null
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for discussions returned from the connection.
+    """
+    orderBy: DiscussionOrder = {field: UPDATED_AT, direction: DESC}
+
+    """
+    A list of states to filter the discussions by.
+    """
+    states: [DiscussionState!] = []
+  ): DiscussionConnection!
+
+  """
+  The number of kilobytes this repository occupies on disk.
+  """
+  diskUsage: Int
+
+  """
+  Returns a single active environment from the current repository by name.
+  """
+  environment(
+    """
+    The name of the environment to be returned.
+    """
+    name: String!
+  ): Environment
+
+  """
+  A list of environments that are in this repository.
+  """
+  environments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the environments
+    """
+    orderBy: Environments = {field: NAME, direction: ASC}
+  ): EnvironmentConnection!
+
+  """
+  Returns how many forks there are of this repository in the whole network.
+  """
+  forkCount: Int!
+
+  """
+  Whether this repository allows forks.
+  """
+  forkingAllowed: Boolean!
+
+  """
+  A list of direct forked repositories.
+  """
+  forks(
+    """
+    Array of viewer's affiliation options for repositories returned from the
+    connection. For example, OWNER will include only repositories that the
+    current viewer owns.
+    """
+    affiliations: [RepositoryAffiliation]
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    If non-null, filters repositories according to whether they have issues enabled
+    """
+    hasIssuesEnabled: Boolean
+
+    """
+    If non-null, filters repositories according to whether they have been locked
+    """
+    isLocked: Boolean
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for repositories returned from the connection
+    """
+    orderBy: RepositoryOrder
+
+    """
+    Array of owner's affiliation options for repositories returned from the
+    connection. For example, OWNER will include only repositories that the
+    organization or user being viewed owns.
+    """
+    ownerAffiliations: [RepositoryAffiliation] = [OWNER, COLLABORATOR]
+
+    """
+    If non-null, filters repositories according to privacy
+    """
+    privacy: RepositoryPrivacy
+  ): RepositoryConnection!
+
+  """
+  The funding links for this repository
+  """
+  fundingLinks: [FundingLink!]!
+
+  """
+  Indicates if the repository has the Discussions feature enabled.
+  """
+  hasDiscussionsEnabled: Boolean!
+
+  """
+  Indicates if the repository has the issues feature enabled.
+  """
+  hasIssuesEnabled: Boolean!
+
+  """
+  Indicates if the repository has the Projects feature enabled.
+  """
+  hasProjectsEnabled: Boolean!
+
+  """
+  Whether vulnerability alerts are enabled for the repository.
+  """
+  hasVulnerabilityAlertsEnabled: Boolean!
+
+  """
+  Indicates if the repository has the wiki feature enabled.
+  """
+  hasWikiEnabled: Boolean!
+
+  """
+  The repository's URL.
+  """
+  homepageUrl: URI
+
+  """
+  The Node ID of the Repository object
+  """
+  id: ID!
+
+  """
+  The interaction ability settings for this repository.
+  """
+  interactionAbility: RepositoryInteractionAbility
+
+  """
+  Indicates if the repository is unmaintained.
+  """
+  isArchived: Boolean!
+
+  """
+  Returns true if blank issue creation is allowed
+  """
+  isBlankIssuesEnabled: Boolean!
+
+  """
+  Returns whether or not this repository is disabled.
+  """
+  isDisabled: Boolean!
+
+  """
+  Returns whether or not this repository is empty.
+  """
+  isEmpty: Boolean!
+
+  """
+  Identifies if the repository is a fork.
+  """
+  isFork: Boolean!
+
+  """
+  Indicates if a repository is either owned by an organization, or is a private fork of an organization repository.
+  """
+  isInOrganization: Boolean!
+
+  """
+  Indicates if the repository has been locked or not.
+  """
+  isLocked: Boolean!
+
+  """
+  Identifies if the repository is a mirror.
+  """
+  isMirror: Boolean!
+
+  """
+  Identifies if the repository is private or internal.
+  """
+  isPrivate: Boolean!
+
+  """
+  Returns true if this repository has a security policy
+  """
+  isSecurityPolicyEnabled: Boolean
+
+  """
+  Identifies if the repository is a template that can be used to generate new repositories.
+  """
+  isTemplate: Boolean!
+
+  """
+  Is this repository a user configuration repository?
+  """
+  isUserConfigurationRepository: Boolean!
+
+  """
+  Returns a single issue from the current repository by number.
+  """
+  issue(
+    """
+    The number for the issue to be returned.
+    """
+    number: Int!
+  ): Issue
+
+  """
+  Returns a single issue-like object from the current repository by number.
+  """
+  issueOrPullRequest(
+    """
+    The number for the issue to be returned.
+    """
+    number: Int!
+  ): IssueOrPullRequest
+
+  """
+  Returns a list of issue templates associated with the repository
+  """
+  issueTemplates: [IssueTemplate!]
+
+  """
+  A list of issues that have been opened in the repository.
+  """
+  issues(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Filtering options for issues returned from the connection.
+    """
+    filterBy: IssueFilters
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    A list of label names to filter the pull requests by.
+    """
+    labels: [String!]
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for issues returned from the connection.
+    """
+    orderBy: IssueOrder
+
+    """
+    A list of states to filter the issues by.
+    """
+    states: [IssueState!]
+  ): IssueConnection!
+
+  """
+  Returns a single label by name
+  """
+  label(
+    """
+    Label name
+    """
+    name: String!
+  ): Label
+
+  """
+  A list of labels associated with the repository.
+  """
+  labels(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for labels returned from the connection.
+    """
+    orderBy: LabelOrder = {field: CREATED_AT, direction: ASC}
+
+    """
+    If provided, searches labels by name and description.
+    """
+    query: String
+  ): LabelConnection
+
+  """
+  A list containing a breakdown of the language composition of the repository.
+  """
+  languages(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Order for connection
+    """
+    orderBy: LanguageOrder
+  ): LanguageConnection
+
+  """
+  Get the latest release for the repository if one exists.
+  """
+  latestRelease: Release
+
+  """
+  The license associated with the repository
+  """
+  licenseInfo: License
+
+  """
+  The reason the repository has been locked.
+  """
+  lockReason: RepositoryLockReason
+
+  """
+  A list of Users that can be mentioned in the context of the repository.
+  """
+  mentionableUsers(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filters users with query on user name and login
+    """
+    query: String
+  ): UserConnection!
+
+  """
+  Whether or not PRs are merged with a merge commit on this repository.
+  """
+  mergeCommitAllowed: Boolean!
+
+  """
+  How the default commit message will be generated when merging a pull request.
+  """
+  mergeCommitMessage: MergeCommitMessage!
+
+  """
+  How the default commit title will be generated when merging a pull request.
+  """
+  mergeCommitTitle: MergeCommitTitle!
+
+  """
+  The merge queue for the specified branch, or for the default branch if no branch is provided.
+  """
+  mergeQueue(
+    """
+    The name of the branch to get the merge queue for. Case sensitive.
+    """
+    branch: String
+  ): MergeQueue
+
+  """
+  Returns a single milestone from the current repository by number.
+  """
+  milestone(
+    """
+    The number for the milestone to be returned.
+    """
+    number: Int!
+  ): Milestone
+
+  """
+  A list of milestones associated with the repository.
+  """
+  milestones(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for milestones.
+    """
+    orderBy: MilestoneOrder
+
+    """
+    Filters milestones with a query on the title
+    """
+    query: String
+
+    """
+    Filter by the state of the milestones.
+    """
+    states: [MilestoneState!]
+  ): MilestoneConnection
+
+  """
+  The repository's original mirror URL.
+  """
+  mirrorUrl: URI
+
+  """
+  The name of the repository.
+  """
+  name: String!
+
+  """
+  The repository's name with owner.
+  """
+  nameWithOwner: String!
+
+  """
+  A Git object in the repository
+  """
+  object(
+    """
+    A Git revision expression suitable for rev-parse
+    """
+    expression: String
+
+    """
+    The Git object ID
+    """
+    oid: GitObjectID
+  ): GitObject
+
+  """
+  The image used to represent this repository in Open Graph data.
+  """
+  openGraphImageUrl: URI!
+
+  """
+  The owner of the repository.
+  """
+  owner: RepositoryOwner!
+
+  """
+  A list of packages under the owner.
+  """
+  packages(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Find packages by their names.
+    """
+    names: [String]
+
+    """
+    Ordering of the returned packages.
+    """
+    orderBy: PackageOrder = {field: CREATED_AT, direction: DESC}
+
+    """
+    Filter registry package by type.
+    """
+    packageType: PackageType
+
+    """
+    Find packages in a repository by ID.
+    """
+    repositoryId: ID
+  ): PackageConnection!
+
+  """
+  The repository parent, if this is a fork.
+  """
+  parent: Repository
+
+  """
+  A list of discussions that have been pinned in this repository.
+  """
+  pinnedDiscussions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): PinnedDiscussionConnection!
+
+  """
+  A list of pinned issues for this repository.
+  """
+  pinnedIssues(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): PinnedIssueConnection
+
+  """
+  The primary language of the repository's code.
+  """
+  primaryLanguage: Language
+
+  """
+  Find project by number.
+  """
+  project(
+    """
+    The project number to find.
+    """
+    number: Int!
+  ): Project
+
+  """
+  Finds and returns the Project according to the provided Project number.
+  """
+  projectV2(
+    """
+    The Project number.
+    """
+    number: Int!
+  ): ProjectV2
+
+  """
+  A list of projects under the owner.
+  """
+  projects(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for projects returned from the connection
+    """
+    orderBy: ProjectOrder
+
+    """
+    Query to search projects by, currently only searching by name.
+    """
+    search: String
+
+    """
+    A list of states to filter the projects by.
+    """
+    states: [ProjectState!]
+  ): ProjectConnection!
+
+  """
+  The HTTP path listing the repository's projects
+  """
+  projectsResourcePath: URI!
+
+  """
+  The HTTP URL listing the repository's projects
+  """
+  projectsUrl: URI!
+
+  """
+  List of projects linked to this repository.
+  """
+  projectsV2(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    How to order the returned projects.
+    """
+    orderBy: ProjectV2Order = {field: NUMBER, direction: DESC}
+
+    """
+    A query string to search for projects linked to the repo.
+    """
+    query: String
+  ): ProjectV2Connection!
+
+  """
+  Returns a single pull request from the current repository by number.
+  """
+  pullRequest(
+    """
+    The number for the pull request to be returned.
+    """
+    number: Int!
+  ): PullRequest
+
+  """
+  Returns a list of pull request templates associated with the repository
+  """
+  pullRequestTemplates: [PullRequestTemplate!]
+
+  """
+  A list of pull requests that have been opened in the repository.
+  """
+  pullRequests(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    The base ref name to filter the pull requests by.
+    """
+    baseRefName: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    The head ref name to filter the pull requests by.
+    """
+    headRefName: String
+
+    """
+    A list of label names to filter the pull requests by.
+    """
+    labels: [String!]
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for pull requests returned from the connection.
+    """
+    orderBy: IssueOrder
+
+    """
+    A list of states to filter the pull requests by.
+    """
+    states: [PullRequestState!]
+  ): PullRequestConnection!
+
+  """
+  Identifies the date and time when the repository was last pushed to.
+  """
+  pushedAt: DateTime
+
+  """
+  Whether or not rebase-merging is enabled on this repository.
+  """
+  rebaseMergeAllowed: Boolean!
+
+  """
+  Recent projects that the viewer has modified in the context of the owner.
+  """
+  recentProjects(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectV2Connection!
+
+  """
+  Fetch a given ref from the repository
+  """
+  ref(
+    """
+    The ref to retrieve. Fully qualified matches are checked in order
+    (`refs/heads/master`) before falling back onto checks for short name matches (`master`).
+    """
+    qualifiedName: String!
+  ): Ref
+
+  """
+  Fetch a list of refs from the repository
+  """
+  refs(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    DEPRECATED: use orderBy. The ordering direction.
+    """
+    direction: OrderDirection
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for refs returned from the connection.
+    """
+    orderBy: RefOrder
+
+    """
+    Filters refs with query on name
+    """
+    query: String
+
+    """
+    A ref name prefix like `refs/heads/`, `refs/tags/`, etc.
+    """
+    refPrefix: String!
+  ): RefConnection
+
+  """
+  Lookup a single release given various criteria.
+  """
+  release(
+    """
+    The name of the Tag the Release was created from
+    """
+    tagName: String!
+  ): Release
+
+  """
+  A list of releases for this repository.
+  """
+  releases(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Order for connection
+    """
+    orderBy: ReleaseOrder
+  ): ReleaseConnection!
+
+  """
+  A list of applied repository-topic associations for this repository.
+  """
+  repositoryTopics(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): RepositoryTopicConnection!
+
+  """
+  The HTTP path for this repository
+  """
+  resourcePath: URI!
+
+  """
+  Returns a single ruleset from the current repository by ID.
+  """
+  ruleset(
+    """
+    The ID of the ruleset to be returned.
+    """
+    databaseId: Int!
+
+    """
+    Include rulesets configured at higher levels that apply to this repository
+    """
+    includeParents: Boolean = true
+  ): RepositoryRuleset
+
+  """
+  A list of rulesets for this repository.
+  """
+  rulesets(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Return rulesets configured at higher levels that apply to this repository
+    """
+    includeParents: Boolean = true
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): RepositoryRulesetConnection
+
+  """
+  The security policy URL.
+  """
+  securityPolicyUrl: URI
+
+  """
+  A description of the repository, rendered to HTML without any links in it.
+  """
+  shortDescriptionHTML(
+    """
+    How many characters to return.
+    """
+    limit: Int = 200
+  ): HTML!
+
+  """
+  Whether or not squash-merging is enabled on this repository.
+  """
+  squashMergeAllowed: Boolean!
+
+  """
+  How the default commit message will be generated when squash merging a pull request.
+  """
+  squashMergeCommitMessage: SquashMergeCommitMessage!
+
+  """
+  How the default commit title will be generated when squash merging a pull request.
+  """
+  squashMergeCommitTitle: SquashMergeCommitTitle!
+
+  """
+  Whether a squash merge commit can use the pull request title as default.
+  """
+  squashPrTitleUsedAsDefault: Boolean!
+    @deprecated(
+      reason: "`squashPrTitleUsedAsDefault` will be removed. Use `Repository.squashMergeCommitTitle` instead. Removal on 2023-04-01 UTC."
+    )
+
+  """
+  The SSH URL to clone this repository
+  """
+  sshUrl: GitSSHRemote!
+
+  """
+  Returns a count of how many stargazers there are on this object
+  """
+  stargazerCount: Int!
+
+  """
+  A list of users who have starred this starrable.
+  """
+  stargazers(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Order for connection
+    """
+    orderBy: StarOrder
+  ): StargazerConnection!
+
+  """
+  Returns a list of all submodules in this repository parsed from the
+  .gitmodules file as of the default branch's HEAD commit.
+  """
+  submodules(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): SubmoduleConnection!
+
+  """
+  Temporary authentication token for cloning this repository.
+  """
+  tempCloneToken: String
+
+  """
+  The repository from which this repository was generated, if any.
+  """
+  templateRepository: Repository
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this repository
+  """
+  url: URI!
+
+  """
+  Whether this repository has a custom image to use with Open Graph as opposed to being represented by the owner's avatar.
+  """
+  usesCustomOpenGraphImage: Boolean!
+
+  """
+  Indicates whether the viewer has admin permissions on this repository.
+  """
+  viewerCanAdminister: Boolean!
+
+  """
+  Can the current viewer create new projects on this owner.
+  """
+  viewerCanCreateProjects: Boolean!
+
+  """
+  Check if the viewer is able to change their subscription status for the repository.
+  """
+  viewerCanSubscribe: Boolean!
+
+  """
+  Indicates whether the viewer can update the topics of this repository.
+  """
+  viewerCanUpdateTopics: Boolean!
+
+  """
+  The last commit email for the viewer.
+  """
+  viewerDefaultCommitEmail: String
+
+  """
+  The last used merge method by the viewer or the default for the repository.
+  """
+  viewerDefaultMergeMethod: PullRequestMergeMethod!
+
+  """
+  Returns a boolean indicating whether the viewing user has starred this starrable.
+  """
+  viewerHasStarred: Boolean!
+
+  """
+  The user's permission level on the repository. Will return null if authenticated as a GitHub App.
+  """
+  viewerPermission: RepositoryPermission
+
+  """
+  A list of emails this viewer can commit with.
+  """
+  viewerPossibleCommitEmails: [String!]
+
+  """
+  Identifies if the viewer is watching, not watching, or ignoring the subscribable entity.
+  """
+  viewerSubscription: SubscriptionState
+
+  """
+  Indicates the repository's visibility level.
+  """
+  visibility: RepositoryVisibility!
+
+  """
+  Returns a single vulnerability alert from the current repository by number.
+  """
+  vulnerabilityAlert(
+    """
+    The number for the vulnerability alert to be returned.
+    """
+    number: Int!
+  ): RepositoryVulnerabilityAlert
+
+  """
+  A list of vulnerability alerts that are on this repository.
+  """
+  vulnerabilityAlerts(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Filter by the scope of the alert's dependency
+    """
+    dependencyScopes: [RepositoryVulnerabilityAlertDependencyScope!]
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter by the state of the alert
+    """
+    states: [RepositoryVulnerabilityAlertState!]
+  ): RepositoryVulnerabilityAlertConnection
+
+  """
+  A list of users watching the repository.
+  """
+  watchers(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserConnection!
+
+  """
+  Whether contributors are required to sign off on web-based commits in this repository.
+  """
+  webCommitSignoffRequired: Boolean!
+}
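+
+# Example query (editor's sketch, not part of the upstream schema): fetching a
+# repository and paginating its `issues` connection with `first`/`after` and
+# `pageInfo`. Assumes the top-level `repository(owner:, name:)` field and the
+# `Issue` and `PageInfo` types defined elsewhere in this schema.
+#
+#   query ($owner: String!, $name: String!, $after: String) {
+#     repository(owner: $owner, name: $name) {
+#       nameWithOwner
+#       description
+#       stargazerCount
+#       forkCount
+#       visibility
+#       issues(first: 20, states: [OPEN], after: $after) {
+#         totalCount
+#         pageInfo { hasNextPage endCursor }
+#         nodes { number title }
+#       }
+#     }
+#   }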
+
+"""
+The affiliation of a user to a repository
+"""
+enum RepositoryAffiliation {
+  """
+  Repositories that the user has been added to as a collaborator.
+  """
+  COLLABORATOR
+
+  """
+  Repositories that the user has access to through being a member of an
+  organization. This includes every repository on every team that the user is on.
+  """
+  ORGANIZATION_MEMBER
+
+  """
+  Repositories that are owned by the authenticated user.
+  """
+  OWNER
+}
+
+"""
+Metadata for an audit entry with action repo.*
+"""
+interface RepositoryAuditEntryData {
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+}
+
+"""
+Information extracted from a repository's `CODEOWNERS` file.
+"""
+type RepositoryCodeowners {
+  """
+  Any problems that were encountered while parsing the `CODEOWNERS` file.
+  """
+  errors: [RepositoryCodeownersError!]!
+}
+
+"""
+An error in a `CODEOWNERS` file.
+"""
+type RepositoryCodeownersError {
+  """
+  The column number where the error occurs.
+  """
+  column: Int!
+
+  """
+  A short string describing the type of error.
+  """
+  kind: String!
+
+  """
+  The line number where the error occurs.
+  """
+  line: Int!
+
+  """
+  A complete description of the error, combining information from other fields.
+  """
+  message: String!
+
+  """
+  The path to the file when the error occurs.
+  """
+  path: String!
+
+  """
+  The content of the line where the error occurs.
+  """
+  source: String!
+
+  """
+  A suggestion of how to fix the error.
+  """
+  suggestion: String
+}
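+
+# Example query (editor's sketch, not part of the upstream schema): surfacing
+# CODEOWNERS parse errors for a given ref via `Repository.codeowners`. Assumes
+# the top-level `repository` field defined elsewhere in this schema.
+#
+#   query {
+#     repository(owner: "octocat", name: "hello-world") {
+#       codeowners(refName: "main") {
+#         errors { line column kind message suggestion }
+#       }
+#     }
+#   }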
+
+"""
+The connection type for User.
+"""
+type RepositoryCollaboratorConnection {
+  """
+  A list of edges.
+  """
+  edges: [RepositoryCollaboratorEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [User]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+Represents a user who is a collaborator of a repository.
+"""
+type RepositoryCollaboratorEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+  node: User!
+
+  """
+  The permission the user has on the repository.
+  """
+  permission: RepositoryPermission!
+
+  """
+  A list of sources for the user's access to the repository.
+  """
+  permissionSources: [PermissionSource!]
+}
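+
+# Example query (editor's sketch, not part of the upstream schema): listing
+# collaborators with their per-repository permission, which lives on the edge
+# rather than on the `User` node. Assumes the top-level `repository` field and
+# the `DIRECT` value of `CollaboratorAffiliation` defined elsewhere in this schema.
+#
+#   query {
+#     repository(owner: "octocat", name: "hello-world") {
+#       collaborators(first: 10, affiliation: DIRECT) {
+#         totalCount
+#         edges {
+#           permission
+#           node { login }
+#         }
+#       }
+#     }
+#   }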
+
+"""
+A list of repositories owned by the subject.
+"""
+type RepositoryConnection {
+  """
+  A list of edges.
+  """
+  edges: [RepositoryEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Repository]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+
+  """
+  The total size in kilobytes of all repositories in the connection. Value will
+  never be larger than max 32-bit signed integer.
+  """
+  totalDiskUsage: Int!
+}
+
+"""
+A repository contact link.
+"""
+type RepositoryContactLink {
+  """
+  The contact link purpose.
+  """
+  about: String!
+
+  """
+  The contact link name.
+  """
+  name: String!
+
+  """
+  The contact link URL.
+  """
+  url: URI!
+}
+
+"""
+The reason a repository is listed as 'contributed'.
+"""
+enum RepositoryContributionType {
+  """
+  Created a commit
+  """
+  COMMIT
+
+  """
+  Created an issue
+  """
+  ISSUE
+
+  """
+  Created a pull request
+  """
+  PULL_REQUEST
+
+  """
+  Reviewed a pull request
+  """
+  PULL_REQUEST_REVIEW
+
+  """
+  Created the repository
+  """
+  REPOSITORY
+}
+
+"""
+Represents an author of discussions in repositories.
+"""
+interface RepositoryDiscussionAuthor {
+  """
+  Discussions this user has started.
+  """
+  repositoryDiscussions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Filter discussions to only those that have been answered or not. Defaults to
+    including both answered and unanswered discussions.
+    """
+    answered: Boolean = null
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for discussions returned from the connection.
+    """
+    orderBy: DiscussionOrder = {field: CREATED_AT, direction: DESC}
+
+    """
+    Filter discussions to only those in a specific repository.
+    """
+    repositoryId: ID
+
+    """
+    A list of states to filter the discussions by.
+    """
+    states: [DiscussionState!] = []
+  ): DiscussionConnection!
+}
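+
+# Example query (editor's sketch, not part of the upstream schema): listing
+# unanswered discussions started by a user. Assumes `User` implements this
+# interface and that the top-level `user` field and the `Discussion` fields
+# are defined elsewhere in this schema.
+#
+#   query {
+#     user(login: "octocat") {
+#       repositoryDiscussions(first: 10, answered: false) {
+#         nodes { title url }
+#       }
+#     }
+#   }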
+
+"""
+Represents an author of discussion comments in repositories.
+"""
+interface RepositoryDiscussionCommentAuthor {
+  """
+  Discussion comments this user has authored.
+  """
+  repositoryDiscussionComments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter discussion comments to only those that were marked as the answer
+    """
+    onlyAnswers: Boolean = false
+
+    """
+    Filter discussion comments to only those in a specific repository.
+    """
+    repositoryId: ID
+  ): DiscussionCommentConnection!
+}
+
+"""
+An edge in a connection.
+"""
+type RepositoryEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Repository
+}
+
+"""
+Parameters to be used for the repository_id condition
+"""
+type RepositoryIdConditionTarget {
+  """
+  One of these repo IDs must match the repo.
+  """
+  repositoryIds: [ID!]!
+}
+
+"""
+Parameters to be used for the repository_id condition
+"""
+input RepositoryIdConditionTargetInput {
+  """
+  One of these repo IDs must match the repo.
+  """
+  repositoryIds: [ID!]!
+}
+
+"""
+A subset of repository info.
+"""
+interface RepositoryInfo {
+  """
+  Identifies the date and time when the repository was archived.
+  """
+  archivedAt: DateTime
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The description of the repository.
+  """
+  description: String
+
+  """
+  The description of the repository rendered to HTML.
+  """
+  descriptionHTML: HTML!
+
+  """
+  Returns how many forks there are of this repository in the whole network.
+  """
+  forkCount: Int!
+
+  """
+  Indicates if the repository has the Discussions feature enabled.
+  """
+  hasDiscussionsEnabled: Boolean!
+
+  """
+  Indicates if the repository has the issues feature enabled.
+  """
+  hasIssuesEnabled: Boolean!
+
+  """
+  Indicates if the repository has the Projects feature enabled.
+  """
+  hasProjectsEnabled: Boolean!
+
+  """
+  Indicates if the repository has the wiki feature enabled.
+  """
+  hasWikiEnabled: Boolean!
+
+  """
+  The repository's URL.
+  """
+  homepageUrl: URI
+
+  """
+  Indicates if the repository is unmaintained.
+  """
+  isArchived: Boolean!
+
+  """
+  Identifies if the repository is a fork.
+  """
+  isFork: Boolean!
+
+  """
+  Indicates if a repository is either owned by an organization, or is a private fork of an organization repository.
+  """
+  isInOrganization: Boolean!
+
+  """
+  Indicates if the repository has been locked or not.
+  """
+  isLocked: Boolean!
+
+  """
+  Identifies if the repository is a mirror.
+  """
+  isMirror: Boolean!
+
+  """
+  Identifies if the repository is private or internal.
+  """
+  isPrivate: Boolean!
+
+  """
+  Identifies if the repository is a template that can be used to generate new repositories.
+  """
+  isTemplate: Boolean!
+
+  """
+  The license associated with the repository
+  """
+  licenseInfo: License
+
+  """
+  The reason the repository has been locked.
+  """
+  lockReason: RepositoryLockReason
+
+  """
+  The repository's original mirror URL.
+  """
+  mirrorUrl: URI
+
+  """
+  The name of the repository.
+  """
+  name: String!
+
+  """
+  The repository's name with owner.
+  """
+  nameWithOwner: String!
+
+  """
+  The image used to represent this repository in Open Graph data.
+  """
+  openGraphImageUrl: URI!
+
+  """
+  The owner of the repository.
+  """
+  owner: RepositoryOwner!
+
+  """
+  Identifies the date and time when the repository was last pushed to.
+  """
+  pushedAt: DateTime
+
+  """
+  The HTTP path for this repository
+  """
+  resourcePath: URI!
+
+  """
+  A description of the repository, rendered to HTML without any links in it.
+  """
+  shortDescriptionHTML(
+    """
+    How many characters to return.
+    """
+    limit: Int = 200
+  ): HTML!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this repository
+  """
+  url: URI!
+
+  """
+  Whether this repository has a custom image to use with Open Graph as opposed to being represented by the owner's avatar.
+  """
+  usesCustomOpenGraphImage: Boolean!
+
+  """
+  Indicates the repository's visibility level.
+  """
+  visibility: RepositoryVisibility!
+}
+
+"""
+Repository interaction limit that applies to this object.
+"""
+type RepositoryInteractionAbility {
+  """
+  The time the currently active limit expires.
+  """
+  expiresAt: DateTime
+
+  """
+  The current limit that is enabled on this object.
+  """
+  limit: RepositoryInteractionLimit!
+
+  """
+  The origin of the currently active interaction limit.
+  """
+  origin: RepositoryInteractionLimitOrigin!
+}
+
+"""
+A repository interaction limit.
+"""
+enum RepositoryInteractionLimit {
+  """
+  Users that are not collaborators will not be able to interact with the repository.
+  """
+  COLLABORATORS_ONLY
+
+  """
+  Users that have not previously committed to a repository’s default branch will be unable to interact with the repository.
+  """
+  CONTRIBUTORS_ONLY
+
+  """
+  Users that have recently created their account will be unable to interact with the repository.
+  """
+  EXISTING_USERS
+
+  """
+  No interaction limits are enabled.
+  """
+  NO_LIMIT
+}
+
+"""
+The length for a repository interaction limit to be enabled for.
+"""
+enum RepositoryInteractionLimitExpiry {
+  """
+  The interaction limit will expire after 1 day.
+  """
+  ONE_DAY
+
+  """
+  The interaction limit will expire after 1 month.
+  """
+  ONE_MONTH
+
+  """
+  The interaction limit will expire after 1 week.
+  """
+  ONE_WEEK
+
+  """
+  The interaction limit will expire after 6 months.
+  """
+  SIX_MONTHS
+
+  """
+  The interaction limit will expire after 3 days.
+  """
+  THREE_DAYS
+}
+
+"""
+Indicates where an interaction limit is configured.
+"""
+enum RepositoryInteractionLimitOrigin {
+  """
+  A limit that is configured at the organization level.
+  """
+  ORGANIZATION
+
+  """
+  A limit that is configured at the repository level.
+  """
+  REPOSITORY
+
+  """
+  A limit that is configured at the user-wide level.
+  """
+  USER
+}
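+
+# Example query (editor's sketch, not part of the upstream schema): inspecting
+# the interaction limit currently in effect on a repository. Assumes the
+# top-level `repository` field defined elsewhere in this schema.
+#
+#   query {
+#     repository(owner: "octocat", name: "hello-world") {
+#       interactionAbility {
+#         limit
+#         origin
+#         expiresAt
+#       }
+#     }
+#   }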
+
+"""
+An invitation for a user to be added to a repository.
+"""
+type RepositoryInvitation implements Node {
+  """
+  The email address that received the invitation.
+  """
+  email: String
+
+  """
+  The Node ID of the RepositoryInvitation object
+  """
+  id: ID!
+
+  """
+  The user who received the invitation.
+  """
+  invitee: User
+
+  """
+  The user who created the invitation.
+  """
+  inviter: User!
+
+  """
+  The permalink for this repository invitation.
+  """
+  permalink: URI!
+
+  """
+  The permission granted on this repository by this invitation.
+  """
+  permission: RepositoryPermission!
+
+  """
+  The Repository the user is invited to.
+  """
+  repository: RepositoryInfo
+}
+
+"""
+A list of repository invitations.
+"""
+type RepositoryInvitationConnection {
+  """
+  A list of edges.
+  """
+  edges: [RepositoryInvitationEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [RepositoryInvitation]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type RepositoryInvitationEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: RepositoryInvitation
+}
+
+"""
+Ordering options for repository invitation connections.
+"""
+input RepositoryInvitationOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order repository invitations by.
+  """
+  field: RepositoryInvitationOrderField!
+}
+
+"""
+Properties by which repository invitation connections can be ordered.
+"""
+enum RepositoryInvitationOrderField {
+  """
+  Order repository invitations by creation time
+  """
+  CREATED_AT
+}
+
+"""
+The possible reasons a given repository could be in a locked state.
+"""
+enum RepositoryLockReason {
+  """
+  The repository is locked due to a billing related reason.
+  """
+  BILLING
+
+  """
+  The repository is locked due to a migration.
+  """
+  MIGRATING
+
+  """
+  The repository is locked due to a move.
+  """
+  MOVING
+
+  """
+  The repository is locked due to a rename.
+  """
+  RENAME
+
+  """
+  The repository is locked due to a trade controls related reason.
+  """
+  TRADE_RESTRICTION
+
+  """
+  The repository is locked due to an ownership transfer.
+  """
+  TRANSFERRING_OWNERSHIP
+}
+
+"""
+A GitHub Enterprise Importer (GEI) repository migration.
+"""
+type RepositoryMigration implements Migration & Node {
+  """
+  The migration flag to continue on error.
+  """
+  continueOnError: Boolean!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: String
+
+  """
+  The reason the migration failed.
+  """
+  failureReason: String
+
+  """
+  The Node ID of the RepositoryMigration object
+  """
+  id: ID!
+
+  """
+  The URL for the migration log (expires 1 day after migration completes).
+  """
+  migrationLogUrl: URI
+
+  """
+  The migration source.
+  """
+  migrationSource: MigrationSource!
+
+  """
+  The target repository name.
+  """
+  repositoryName: String!
+
+  """
+  The migration source URL, for example `https://github.com` or `https://monalisa.ghe.com`.
+  """
+  sourceUrl: URI!
+
+  """
+  The migration state.
+  """
+  state: MigrationState!
+
+  """
+  The number of warnings encountered for this migration. To review the warnings,
+  check the [Migration Log](https://docs.github.com/en/migrations/using-github-enterprise-importer/completing-your-migration-with-github-enterprise-importer/accessing-your-migration-logs-for-github-enterprise-importer).
+  """
+  warningsCount: Int!
+}
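+
+# Example query (editor's sketch, not part of the upstream schema): polling a
+# migration's state by node ID. Assumes the top-level `node` field defined
+# elsewhere in this schema; the ID is a placeholder.
+#
+#   query {
+#     node(id: "RM_exampleMigrationId") {
+#       ... on RepositoryMigration {
+#         state
+#         failureReason
+#         warningsCount
+#         migrationLogUrl
+#       }
+#     }
+#   }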
+
+"""
+The connection type for RepositoryMigration.
+"""
+type RepositoryMigrationConnection {
+  """
+  A list of edges.
+  """
+  edges: [RepositoryMigrationEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [RepositoryMigration]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type RepositoryMigrationEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: RepositoryMigration
+}
+
+"""
+Ordering options for repository migrations.
+"""
+input RepositoryMigrationOrder {
+  """
+  The ordering direction.
+  """
+  direction: RepositoryMigrationOrderDirection!
+
+  """
+  The field to order repository migrations by.
+  """
+  field: RepositoryMigrationOrderField!
+}
+
+"""
+Possible directions in which to order a list of repository migrations when provided an `orderBy` argument.
+"""
+enum RepositoryMigrationOrderDirection {
+  """
+  Specifies an ascending order for a given `orderBy` argument.
+  """
+  ASC
+
+  """
+  Specifies a descending order for a given `orderBy` argument.
+  """
+  DESC
+}
+
+"""
+Properties by which repository migrations can be ordered.
+"""
+enum RepositoryMigrationOrderField {
+  """
+  Order repository migrations by when they were created.
+  """
+  CREATED_AT
+}
+
+"""
+Parameters to be used for the repository_name condition
+"""
+type RepositoryNameConditionTarget {
+  """
+  Array of repository names or patterns to exclude. The condition will not pass if any of these patterns match.
+  """
+  exclude: [String!]!
+
+  """
+  Array of repository names or patterns to include. One of these patterns must
+  match for the condition to pass. Also accepts `~ALL` to include all repositories.
+  """
+  include: [String!]!
+
+  """
+  Target changes that match these patterns will be prevented except by those with bypass permissions.
+  """
+  protected: Boolean!
+}
+
+"""
+Parameters to be used for the repository_name condition
+"""
+input RepositoryNameConditionTargetInput {
+  """
+  Array of repository names or patterns to exclude. The condition will not pass if any of these patterns match.
+  """
+  exclude: [String!]!
+
+  """
+  Array of repository names or patterns to include. One of these patterns must
+  match for the condition to pass. Also accepts `~ALL` to include all repositories.
+  """
+  include: [String!]!
+
+  """
+  Target changes that match these patterns will be prevented except by those with bypass permissions.
+  """
+  protected: Boolean
+}
+
+"""
+Represents an object that belongs to a repository.
+"""
+interface RepositoryNode {
+  """
+  The repository associated with this node.
+  """
+  repository: Repository!
+}
+
+"""
+Ordering options for repository connections
+"""
+input RepositoryOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order repositories by.
+  """
+  field: RepositoryOrderField!
+}
+
+"""
+Properties by which repository connections can be ordered.
+"""
+enum RepositoryOrderField {
+  """
+  Order repositories by creation time
+  """
+  CREATED_AT
+
+  """
+  Order repositories by name
+  """
+  NAME
+
+  """
+  Order repositories by push time
+  """
+  PUSHED_AT
+
+  """
+  Order repositories by number of stargazers
+  """
+  STARGAZERS
+
+  """
+  Order repositories by update time
+  """
+  UPDATED_AT
+}
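+
+# Example query (editor's sketch, not part of the upstream schema): ordering a
+# `repositories` connection (see `RepositoryOwner` below) by stargazer count.
+# Assumes the top-level `organization` field and the `OrderDirection` enum
+# defined elsewhere in this schema; the login is a placeholder.
+#
+#   query {
+#     organization(login: "my-org") {
+#       repositories(first: 10, privacy: PUBLIC, orderBy: {field: STARGAZERS, direction: DESC}) {
+#         nodes { nameWithOwner stargazerCount }
+#       }
+#     }
+#   }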
+
+"""
+Represents an owner of a Repository.
+"""
+interface RepositoryOwner {
+  """
+  A URL pointing to the owner's public avatar.
+  """
+  avatarUrl(
+    """
+    The size of the resulting square image.
+    """
+    size: Int
+  ): URI!
+
+  """
+  The Node ID of the RepositoryOwner object
+  """
+  id: ID!
+
+  """
+  The username used to login.
+  """
+  login: String!
+
+  """
+  A list of repositories that the user owns.
+  """
+  repositories(
+    """
+    Array of viewer's affiliation options for repositories returned from the
+    connection. For example, OWNER will include only repositories that the
+    current viewer owns.
+    """
+    affiliations: [RepositoryAffiliation]
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    If non-null, filters repositories according to whether they have issues enabled
+    """
+    hasIssuesEnabled: Boolean
+
+    """
+    If non-null, filters repositories according to whether they are archived and not maintained
+    """
+    isArchived: Boolean
+
+    """
+    If non-null, filters repositories according to whether they are forks of another repository
+    """
+    isFork: Boolean
+
+    """
+    If non-null, filters repositories according to whether they have been locked
+    """
+    isLocked: Boolean
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for repositories returned from the connection
+    """
+    orderBy: RepositoryOrder
+
+    """
+    Array of owner's affiliation options for repositories returned from the
+    connection. For example, OWNER will include only repositories that the
+    organization or user being viewed owns.
+    """
+    ownerAffiliations: [RepositoryAffiliation] = [OWNER, COLLABORATOR]
+
+    """
+    If non-null, filters repositories according to privacy
+    """
+    privacy: RepositoryPrivacy
+  ): RepositoryConnection!
+
+  """
+  Find Repository.
+  """
+  repository(
+    """
+    Follow repository renames. If disabled, a repository referenced by its old name will return an error.
+    """
+    followRenames: Boolean = true
+
+    """
+    Name of Repository to find.
+    """
+    name: String!
+  ): Repository
+
+  """
+  The HTTP URL for the owner.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for the owner.
+  """
+  url: URI!
+}
+
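+# A sketch of resolving the RepositoryOwner interface above, assuming a
+# `repositoryOwner(login:)` root field as in GitHub's public API:
+#
+#   query {
+#     repositoryOwner(login: "octocat") {
+#       login
+#       repository(name: "hello-world", followRenames: true) { id }
+#     }
+#   }
+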
+"""
+The access level to a repository
+"""
+enum RepositoryPermission {
+  """
+  Can read, clone, and push to this repository. Can also manage issues, pull
+  requests, and repository settings, including adding collaborators
+  """
+  ADMIN
+
+  """
+  Can read, clone, and push to this repository. They can also manage issues, pull requests, and some repository settings
+  """
+  MAINTAIN
+
+  """
+  Can read and clone this repository. Can also open and comment on issues and pull requests
+  """
+  READ
+
+  """
+  Can read and clone this repository. Can also manage issues and pull requests
+  """
+  TRIAGE
+
+  """
+  Can read, clone, and push to this repository. Can also manage issues and pull requests
+  """
+  WRITE
+}
+
+"""
+The privacy of a repository
+"""
+enum RepositoryPrivacy {
+  """
+  Private
+  """
+  PRIVATE
+
+  """
+  Public
+  """
+  PUBLIC
+}
+
+"""
+A repository rule.
+"""
+type RepositoryRule implements Node {
+  """
+  The Node ID of the RepositoryRule object
+  """
+  id: ID!
+
+  """
+  The parameters for this rule.
+  """
+  parameters: RuleParameters
+
+  """
+  The repository ruleset associated with this rule configuration
+  """
+  repositoryRuleset: RepositoryRuleset
+
+  """
+  The type of rule.
+  """
+  type: RepositoryRuleType!
+}
+
+"""
+Set of conditions that determine if a ruleset will evaluate
+"""
+type RepositoryRuleConditions {
+  """
+  Configuration for the ref_name condition
+  """
+  refName: RefNameConditionTarget
+
+  """
+  Configuration for the repository_id condition
+  """
+  repositoryId: RepositoryIdConditionTarget
+
+  """
+  Configuration for the repository_name condition
+  """
+  repositoryName: RepositoryNameConditionTarget
+}
+
+"""
+Specifies the conditions required for a ruleset to evaluate
+"""
+input RepositoryRuleConditionsInput {
+  """
+  Configuration for the ref_name condition
+  """
+  refName: RefNameConditionTargetInput
+
+  """
+  Configuration for the repository_id condition
+  """
+  repositoryId: RepositoryIdConditionTargetInput
+
+  """
+  Configuration for the repository_name condition
+  """
+  repositoryName: RepositoryNameConditionTargetInput
+}
+
+"""
+The connection type for RepositoryRule.
+"""
+type RepositoryRuleConnection {
+  """
+  A list of edges.
+  """
+  edges: [RepositoryRuleEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [RepositoryRule]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type RepositoryRuleEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: RepositoryRule
+}
+
+"""
+Specifies the attributes for a new or updated rule.
+"""
+input RepositoryRuleInput {
+  """
+  Optional ID of this rule when updating
+  """
+  id: ID @possibleTypes(concreteTypes: ["RepositoryRule"])
+
+  """
+  The parameters for the rule.
+  """
+  parameters: RuleParametersInput
+
+  """
+  The type of rule to create.
+  """
+  type: RepositoryRuleType!
+}
+
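+# A sketch of a RepositoryRuleInput literal. The nested parameter object comes
+# from RuleParametersInput (defined later in this file); the
+# `requiredApprovingReviewCount` field is assumed from GitHub's published
+# schema and does not appear in this excerpt:
+#
+#   {
+#     type: PULL_REQUEST
+#     parameters: { pullRequest: { requiredApprovingReviewCount: 1 } }
+#   }
+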
+"""
+The rule types supported in rulesets
+"""
+enum RepositoryRuleType {
+  """
+  Authorization
+  """
+  AUTHORIZATION
+
+  """
+  Branch name pattern
+  """
+  BRANCH_NAME_PATTERN
+
+  """
+  Committer email pattern
+  """
+  COMMITTER_EMAIL_PATTERN
+
+  """
+  Commit author email pattern
+  """
+  COMMIT_AUTHOR_EMAIL_PATTERN
+
+  """
+  Commit message pattern
+  """
+  COMMIT_MESSAGE_PATTERN
+
+  """
+  Only allow users with bypass permission to create matching refs.
+  """
+  CREATION
+
+  """
+  Only allow users with bypass permissions to delete matching refs.
+  """
+  DELETION
+
+  """
+  Branch is read-only. Users cannot push to the branch.
+  """
+  LOCK_BRANCH
+
+  """
+  Max ref updates
+  """
+  MAX_REF_UPDATES
+
+  """
+  Merges must be performed via a merge queue.
+  """
+  MERGE_QUEUE
+
+  """
+  Merge queue locked ref
+  """
+  MERGE_QUEUE_LOCKED_REF
+
+  """
+  Prevent users with push access from force pushing to refs.
+  """
+  NON_FAST_FORWARD
+
+  """
+  Require all commits be made to a non-target branch and submitted via a pull request before they can be merged.
+  """
+  PULL_REQUEST
+
+  """
+  Choose which environments must be successfully deployed to before refs can be pushed into a ref that matches this rule.
+  """
+  REQUIRED_DEPLOYMENTS
+
+  """
+  Prevent merge commits from being pushed to matching refs.
+  """
+  REQUIRED_LINEAR_HISTORY
+
+  """
+  When enabled, all conversations on code must be resolved before a pull request
+  can be merged into a branch that matches this rule.
+  """
+  REQUIRED_REVIEW_THREAD_RESOLUTION
+
+  """
+  Commits pushed to matching refs must have verified signatures.
+  """
+  REQUIRED_SIGNATURES
+
+  """
+  Choose which status checks must pass before the ref is updated. When enabled,
+  commits must first be pushed to another ref where the checks pass.
+  """
+  REQUIRED_STATUS_CHECKS
+
+  """
+  Require all commits be made to a non-target branch and submitted via a pull
+  request, and require workflow status checks to pass, before they can be merged.
+  """
+  REQUIRED_WORKFLOW_STATUS_CHECKS
+
+  """
+  Commits pushed to matching refs must have verified signatures.
+  """
+  RULESET_REQUIRED_SIGNATURES
+
+  """
+  Secret scanning
+  """
+  SECRET_SCANNING
+
+  """
+  Tag
+  """
+  TAG
+
+  """
+  Tag name pattern
+  """
+  TAG_NAME_PATTERN
+
+  """
+  Only allow users with bypass permission to update matching refs.
+  """
+  UPDATE
+
+  """
+  Require all changes made to a targeted branch to pass the specified workflows before they can be merged.
+  """
+  WORKFLOWS
+
+  """
+  Workflow files cannot be modified.
+  """
+  WORKFLOW_UPDATES
+}
+
+"""
+A repository ruleset.
+"""
+type RepositoryRuleset implements Node {
+  """
+  The actors that can bypass this ruleset
+  """
+  bypassActors(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): RepositoryRulesetBypassActorConnection
+
+  """
+  The set of conditions that must evaluate to true for this ruleset to apply
+  """
+  conditions: RepositoryRuleConditions!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The enforcement level of this ruleset
+  """
+  enforcement: RuleEnforcement!
+
+  """
+  The Node ID of the RepositoryRuleset object
+  """
+  id: ID!
+
+  """
+  Name of the ruleset.
+  """
+  name: String!
+
+  """
+  List of rules.
+  """
+  rules(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    The type of rule.
+    """
+    type: RepositoryRuleType
+  ): RepositoryRuleConnection
+
+  """
+  Source of ruleset.
+  """
+  source: RuleSource!
+
+  """
+  Target of the ruleset.
+  """
+  target: RepositoryRulesetTarget
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
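+# Illustrative traversal of rulesets and their rules, assuming
+# `repository(owner:, name:)` and a `Repository.rulesets` connection as in
+# GitHub's public API (neither appears in this excerpt):
+#
+#   query {
+#     repository(owner: "octocat", name: "hello-world") {
+#       rulesets(first: 5) {
+#         nodes {
+#           name
+#           enforcement
+#           rules(first: 10, type: PULL_REQUEST) { nodes { type } }
+#         }
+#       }
+#     }
+#   }
+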
+"""
+A team or app that has the ability to bypass rules defined on a ruleset
+"""
+type RepositoryRulesetBypassActor implements Node {
+  """
+  The actor that can bypass rules.
+  """
+  actor: BypassActor
+
+  """
+  The mode for the bypass actor
+  """
+  bypassMode: RepositoryRulesetBypassActorBypassMode
+
+  """
+  The Node ID of the RepositoryRulesetBypassActor object
+  """
+  id: ID!
+
+  """
+  This actor represents the ability for an organization owner to bypass rules
+  """
+  organizationAdmin: Boolean!
+
+  """
+  If the actor is a repository role, the repository role's ID that can bypass rules
+  """
+  repositoryRoleDatabaseId: Int
+
+  """
+  If the actor is a repository role, the repository role's name that can bypass rules
+  """
+  repositoryRoleName: String
+
+  """
+  Identifies the ruleset associated with the allowed actor
+  """
+  repositoryRuleset: RepositoryRuleset
+}
+
+"""
+The bypass mode for a specific actor on a ruleset.
+"""
+enum RepositoryRulesetBypassActorBypassMode {
+  """
+  The actor can always bypass rules
+  """
+  ALWAYS
+
+  """
+  The actor can only bypass rules via a pull request
+  """
+  PULL_REQUEST
+}
+
+"""
+The connection type for RepositoryRulesetBypassActor.
+"""
+type RepositoryRulesetBypassActorConnection {
+  """
+  A list of edges.
+  """
+  edges: [RepositoryRulesetBypassActorEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [RepositoryRulesetBypassActor]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type RepositoryRulesetBypassActorEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: RepositoryRulesetBypassActor
+}
+
+"""
+Specifies the attributes for a new or updated ruleset bypass actor. Only one of
+`actor_id`, `repository_role_database_id`, or `organization_admin` should be specified.
+"""
+input RepositoryRulesetBypassActorInput {
+  """
+  For Team and Integration bypasses, the Team or Integration ID
+  """
+  actorId: ID
+
+  """
+  The bypass mode for this actor.
+  """
+  bypassMode: RepositoryRulesetBypassActorBypassMode!
+
+  """
+  For organization owner bypasses, true
+  """
+  organizationAdmin: Boolean
+
+  """
+  For role bypasses, the role database ID
+  """
+  repositoryRoleDatabaseId: Int
+}
+
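+# Example literals honoring the "only one of" constraint documented above
+# (IDs are placeholders):
+#
+#   { actorId: "T_kwDOA...", bypassMode: ALWAYS }
+#   { organizationAdmin: true, bypassMode: PULL_REQUEST }
+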
+"""
+The connection type for RepositoryRuleset.
+"""
+type RepositoryRulesetConnection {
+  """
+  A list of edges.
+  """
+  edges: [RepositoryRulesetEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [RepositoryRuleset]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type RepositoryRulesetEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: RepositoryRuleset
+}
+
+"""
+The targets supported for rulesets
+"""
+enum RepositoryRulesetTarget {
+  """
+  Branch
+  """
+  BRANCH
+
+  """
+  Tag
+  """
+  TAG
+}
+
+"""
+A repository-topic connects a repository to a topic.
+"""
+type RepositoryTopic implements Node & UniformResourceLocatable {
+  """
+  The Node ID of the RepositoryTopic object
+  """
+  id: ID!
+
+  """
+  The HTTP path for this repository-topic.
+  """
+  resourcePath: URI!
+
+  """
+  The topic.
+  """
+  topic: Topic!
+
+  """
+  The HTTP URL for this repository-topic.
+  """
+  url: URI!
+}
+
+"""
+The connection type for RepositoryTopic.
+"""
+type RepositoryTopicConnection {
+  """
+  A list of edges.
+  """
+  edges: [RepositoryTopicEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [RepositoryTopic]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type RepositoryTopicEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: RepositoryTopic
+}
+
+"""
+The repository's visibility level.
+"""
+enum RepositoryVisibility {
+  """
+  The repository is visible only to users in the same business.
+  """
+  INTERNAL
+
+  """
+  The repository is visible only to those with explicit access.
+  """
+  PRIVATE
+
+  """
+  The repository is visible to everyone.
+  """
+  PUBLIC
+}
+
+"""
+Audit log entry for a repository_visibility_change.disable event.
+"""
+type RepositoryVisibilityChangeDisableAuditEntry implements AuditEntry & EnterpriseAuditEntryData & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The HTTP path for this enterprise.
+  """
+  enterpriseResourcePath: URI
+
+  """
+  The slug of the enterprise.
+  """
+  enterpriseSlug: String
+
+  """
+  The HTTP URL for this enterprise.
+  """
+  enterpriseUrl: URI
+
+  """
+  The Node ID of the RepositoryVisibilityChangeDisableAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a repository_visibility_change.enable event.
+"""
+type RepositoryVisibilityChangeEnableAuditEntry implements AuditEntry & EnterpriseAuditEntryData & Node & OrganizationAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The HTTP path for this enterprise.
+  """
+  enterpriseResourcePath: URI
+
+  """
+  The slug of the enterprise.
+  """
+  enterpriseSlug: String
+
+  """
+  The HTTP URL for this enterprise.
+  """
+  enterpriseUrl: URI
+
+  """
+  The Node ID of the RepositoryVisibilityChangeEnableAuditEntry object
+  """
+  id: ID!
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+A Dependabot alert for a repository with a dependency affected by a security vulnerability.
+"""
+type RepositoryVulnerabilityAlert implements Node & RepositoryNode {
+  """
+  When was the alert auto-dismissed?
+  """
+  autoDismissedAt: DateTime
+
+  """
+  When was the alert created?
+  """
+  createdAt: DateTime!
+
+  """
+  The associated Dependabot update
+  """
+  dependabotUpdate: DependabotUpdate
+
+  """
+  The scope of an alert's dependency
+  """
+  dependencyScope: RepositoryVulnerabilityAlertDependencyScope
+
+  """
+  Comment explaining the reason the alert was dismissed
+  """
+  dismissComment: String
+
+  """
+  The reason the alert was dismissed
+  """
+  dismissReason: String
+
+  """
+  When was the alert dismissed?
+  """
+  dismissedAt: DateTime
+
+  """
+  The user who dismissed the alert
+  """
+  dismisser: User
+
+  """
+  When was the alert fixed?
+  """
+  fixedAt: DateTime
+
+  """
+  The Node ID of the RepositoryVulnerabilityAlert object
+  """
+  id: ID!
+
+  """
+  Identifies the alert number.
+  """
+  number: Int!
+
+  """
+  The associated repository
+  """
+  repository: Repository!
+
+  """
+  The associated security advisory
+  """
+  securityAdvisory: SecurityAdvisory
+
+  """
+  The associated security vulnerability
+  """
+  securityVulnerability: SecurityVulnerability
+
+  """
+  Identifies the state of the alert.
+  """
+  state: RepositoryVulnerabilityAlertState!
+
+  """
+  The vulnerable manifest filename
+  """
+  vulnerableManifestFilename: String!
+
+  """
+  The vulnerable manifest path
+  """
+  vulnerableManifestPath: String!
+
+  """
+  The vulnerable requirements
+  """
+  vulnerableRequirements: String
+}
+
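+# Illustrative Dependabot alert query, assuming `repository(owner:, name:)`
+# and a `Repository.vulnerabilityAlerts` connection as in GitHub's public API:
+#
+#   query {
+#     repository(owner: "octocat", name: "hello-world") {
+#       vulnerabilityAlerts(first: 10) {
+#         nodes {
+#           number
+#           state
+#           securityAdvisory { ghsaId severity }
+#         }
+#       }
+#     }
+#   }
+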
+"""
+The connection type for RepositoryVulnerabilityAlert.
+"""
+type RepositoryVulnerabilityAlertConnection {
+  """
+  A list of edges.
+  """
+  edges: [RepositoryVulnerabilityAlertEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [RepositoryVulnerabilityAlert]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+The possible scopes of an alert's dependency.
+"""
+enum RepositoryVulnerabilityAlertDependencyScope {
+  """
+  A dependency that is only used in development
+  """
+  DEVELOPMENT
+
+  """
+  A dependency that is leveraged during application runtime
+  """
+  RUNTIME
+}
+
+"""
+An edge in a connection.
+"""
+type RepositoryVulnerabilityAlertEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: RepositoryVulnerabilityAlert
+}
+
+"""
+The possible states of an alert
+"""
+enum RepositoryVulnerabilityAlertState {
+  """
+  An alert that has been automatically closed by Dependabot.
+  """
+  AUTO_DISMISSED
+
+  """
+  An alert that has been manually closed by a user.
+  """
+  DISMISSED
+
+  """
+  An alert that has been resolved by a code change.
+  """
+  FIXED
+
+  """
+  An alert that is still open.
+  """
+  OPEN
+}
+
+"""
+Autogenerated input type of RequestReviews
+"""
+input RequestReviewsInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the pull request to modify.
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+
+  """
+  The Node IDs of the teams to request.
+  """
+  teamIds: [ID!] @possibleTypes(concreteTypes: ["Team"])
+
+  """
+  Add users to the set rather than replace.
+  """
+  union: Boolean = false
+
+  """
+  The Node IDs of the users to request.
+  """
+  userIds: [ID!] @possibleTypes(concreteTypes: ["User"])
+}
+
+"""
+Autogenerated return type of RequestReviews
+"""
+type RequestReviewsPayload {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The pull request for which reviews were requested.
+  """
+  pullRequest: PullRequest
+
+  """
+  The edge from the pull request to the requested reviewers.
+  """
+  requestedReviewersEdge: UserEdge
+}
+
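+# A sketch of the corresponding mutation. The `requestReviews` field name is
+# inferred from the autogenerated input/payload names above; IDs are placeholders:
+#
+#   mutation {
+#     requestReviews(input: {
+#       pullRequestId: "PR_kwDOA..."
+#       userIds: ["U_kgDOA..."]
+#       union: true
+#     }) {
+#       pullRequest { number }
+#     }
+#   }
+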
+"""
+The possible states that can be requested when creating a check run.
+"""
+enum RequestableCheckStatusState {
+  """
+  The check suite or run has been completed.
+  """
+  COMPLETED
+
+  """
+  The check suite or run is in progress.
+  """
+  IN_PROGRESS
+
+  """
+  The check suite or run is in pending state.
+  """
+  PENDING
+
+  """
+  The check suite or run has been queued.
+  """
+  QUEUED
+
+  """
+  The check suite or run is in waiting state.
+  """
+  WAITING
+}
+
+"""
+Types that can be requested as reviewers.
+"""
+union RequestedReviewer = Bot | Mannequin | Team | User
+
+"""
+The connection type for RequestedReviewer.
+"""
+type RequestedReviewerConnection {
+  """
+  A list of edges.
+  """
+  edges: [RequestedReviewerEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [RequestedReviewer]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type RequestedReviewerEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: RequestedReviewer
+}
+
+"""
+Represents a type that can be required by a pull request for merging.
+"""
+interface RequirableByPullRequest {
+  """
+  Whether this is required to pass before merging for a specific pull request.
+  """
+  isRequired(
+    """
+    The id of the pull request this is required for
+    """
+    pullRequestId: ID
+
+    """
+    The number of the pull request this is required for
+    """
+    pullRequestNumber: Int
+  ): Boolean!
+}
+
+"""
+Choose which environments must be successfully deployed to before refs can be pushed into a ref that matches this rule.
+"""
+type RequiredDeploymentsParameters {
+  """
+  The environments that must be successfully deployed to before branches can be merged.
+  """
+  requiredDeploymentEnvironments: [String!]!
+}
+
+"""
+Choose which environments must be successfully deployed to before refs can be pushed into a ref that matches this rule.
+"""
+input RequiredDeploymentsParametersInput {
+  """
+  The environments that must be successfully deployed to before branches can be merged.
+  """
+  requiredDeploymentEnvironments: [String!]!
+}
+
+"""
+Represents a required status check for a protected branch, but not any specific run of that check.
+"""
+type RequiredStatusCheckDescription {
+  """
+  The App that must provide this status in order for it to be accepted.
+  """
+  app: App
+
+  """
+  The name of this status.
+  """
+  context: String!
+}
+
+"""
+Specifies the attributes for a new or updated required status check.
+"""
+input RequiredStatusCheckInput {
+  """
+  The ID of the App that must set the status in order for it to be accepted.
+  Omit this value to use whichever app has recently been setting this status, or
+  use "any" to allow any app to set the status.
+  """
+  appId: ID
+
+  """
+  Status check context that must pass for commits to be accepted to the matching branch.
+  """
+  context: String!
+}
+
+"""
+Choose which status checks must pass before the ref is updated. When enabled,
+commits must first be pushed to another ref where the checks pass.
+"""
+type RequiredStatusChecksParameters {
+  """
+  Status checks that are required.
+  """
+  requiredStatusChecks: [StatusCheckConfiguration!]!
+
+  """
+  Whether pull requests targeting a matching branch must be tested with the
+  latest code. This setting will not take effect unless at least one status
+  check is enabled.
+  """
+  strictRequiredStatusChecksPolicy: Boolean!
+}
+
+"""
+Choose which status checks must pass before the ref is updated. When enabled,
+commits must first be pushed to another ref where the checks pass.
+"""
+input RequiredStatusChecksParametersInput {
+  """
+  Status checks that are required.
+  """
+  requiredStatusChecks: [StatusCheckConfigurationInput!]!
+
+  """
+  Whether pull requests targeting a matching branch must be tested with the
+  latest code. This setting will not take effect unless at least one status
+  check is enabled.
+  """
+  strictRequiredStatusChecksPolicy: Boolean!
+}
+
+"""
+Autogenerated input type of RerequestCheckSuite
+"""
+input RerequestCheckSuiteInput {
+  """
+  The Node ID of the check suite.
+  """
+  checkSuiteId: ID! @possibleTypes(concreteTypes: ["CheckSuite"])
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of RerequestCheckSuite
+"""
+type RerequestCheckSuitePayload {
+  """
+  The requested check suite.
+  """
+  checkSuite: CheckSuite
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of ResolveReviewThread
+"""
+input ResolveReviewThreadInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the thread to resolve
+  """
+  threadId: ID! @possibleTypes(concreteTypes: ["PullRequestReviewThread"])
+}
+
+"""
+Autogenerated return type of ResolveReviewThread
+"""
+type ResolveReviewThreadPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The thread to resolve.
+  """
+  thread: PullRequestReviewThread
+}
+
+"""
+Represents a private contribution a user made on GitHub.
+"""
+type RestrictedContribution implements Contribution {
+  """
+  Whether this contribution is associated with a record you do not have access to. For
+  example, your own 'first issue' contribution may have been made on a repository you can no
+  longer access.
+  """
+  isRestricted: Boolean!
+
+  """
+  When this contribution was made.
+  """
+  occurredAt: DateTime!
+
+  """
+  The HTTP path for this contribution.
+  """
+  resourcePath: URI!
+
+  """
+  The HTTP URL for this contribution.
+  """
+  url: URI!
+
+  """
+  The user who made this contribution.
+  """
+  user: User!
+}
+
+"""
+Autogenerated input type of RetireSponsorsTier
+"""
+input RetireSponsorsTierInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the published tier to retire.
+  """
+  tierId: ID! @possibleTypes(concreteTypes: ["SponsorsTier"])
+}
+
+"""
+Autogenerated return type of RetireSponsorsTier
+"""
+type RetireSponsorsTierPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The tier that was retired.
+  """
+  sponsorsTier: SponsorsTier
+}
+
+"""
+Autogenerated input type of RevertPullRequest
+"""
+input RevertPullRequestInput {
+  """
+  The description of the revert pull request.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Indicates whether the revert pull request should be a draft.
+  """
+  draft: Boolean = false
+
+  """
+  The ID of the pull request to revert.
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+
+  """
+  The title of the revert pull request.
+  """
+  title: String
+}
+
+"""
+Autogenerated return type of RevertPullRequest
+"""
+type RevertPullRequestPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The pull request that was reverted.
+  """
+  pullRequest: PullRequest
+
+  """
+  The new pull request that reverts the input pull request.
+  """
+  revertPullRequest: PullRequest
+}
+
+"""
+A user, team, or app who has the ability to dismiss a review on a protected branch.
+"""
+type ReviewDismissalAllowance implements Node {
+  """
+  The actor that can dismiss.
+  """
+  actor: ReviewDismissalAllowanceActor
+
+  """
+  Identifies the branch protection rule associated with the allowed user, team, or app.
+  """
+  branchProtectionRule: BranchProtectionRule
+
+  """
+  The Node ID of the ReviewDismissalAllowance object
+  """
+  id: ID!
+}
+
+"""
+Types that can be an actor.
+"""
+union ReviewDismissalAllowanceActor = App | Team | User
+
+"""
+The connection type for ReviewDismissalAllowance.
+"""
+type ReviewDismissalAllowanceConnection {
+  """
+  A list of edges.
+  """
+  edges: [ReviewDismissalAllowanceEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ReviewDismissalAllowance]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ReviewDismissalAllowanceEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ReviewDismissalAllowance
+}
+
+"""
+Represents a 'review_dismissed' event on a given issue or pull request.
+"""
+type ReviewDismissedEvent implements Node & UniformResourceLocatable {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  Identifies the optional message associated with the 'review_dismissed' event.
+  """
+  dismissalMessage: String
+
+  """
+  Identifies the optional message associated with the event, rendered to HTML.
+  """
+  dismissalMessageHTML: String
+
+  """
+  The Node ID of the ReviewDismissedEvent object
+  """
+  id: ID!
+
+  """
+  Identifies the previous state of the review with the 'review_dismissed' event.
+  """
+  previousReviewState: PullRequestReviewState!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+
+  """
+  Identifies the commit which caused the review to become stale.
+  """
+  pullRequestCommit: PullRequestCommit
+
+  """
+  The HTTP path for this review dismissed event.
+  """
+  resourcePath: URI!
+
+  """
+  Identifies the review associated with the 'review_dismissed' event.
+  """
+  review: PullRequestReview
+
+  """
+  The HTTP URL for this review dismissed event.
+  """
+  url: URI!
+}
+
+"""
+A request for a user to review a pull request.
+"""
+type ReviewRequest implements Node {
+  """
+  Whether this request was created for a code owner
+  """
+  asCodeOwner: Boolean!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the ReviewRequest object
+  """
+  id: ID!
+
+  """
+  Identifies the pull request associated with this review request.
+  """
+  pullRequest: PullRequest!
+
+  """
+  The reviewer that is requested.
+  """
+  requestedReviewer: RequestedReviewer
+}
+
+"""
+The connection type for ReviewRequest.
+"""
+type ReviewRequestConnection {
+  """
+  A list of edges.
+  """
+  edges: [ReviewRequestEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [ReviewRequest]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type ReviewRequestEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: ReviewRequest
+}
+
+"""
+Represents a 'review_request_removed' event on a given pull request.
+"""
+type ReviewRequestRemovedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the ReviewRequestRemovedEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+
+  """
+  Identifies the reviewer whose review request was removed.
+  """
+  requestedReviewer: RequestedReviewer
+}
+
+"""
+Represents a 'review_requested' event on a given pull request.
+"""
+type ReviewRequestedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the ReviewRequestedEvent object
+  """
+  id: ID!
+
+  """
+  PullRequest referenced by event.
+  """
+  pullRequest: PullRequest!
+
+  """
+  Identifies the reviewer whose review was requested.
+  """
+  requestedReviewer: RequestedReviewer
+}
+
+"""
+A hovercard context with a message describing the current code review state of the pull
+request.
+"""
+type ReviewStatusHovercardContext implements HovercardContext {
+  """
+  A string describing this context
+  """
+  message: String!
+
+  """
+  An octicon to accompany this context
+  """
+  octicon: String!
+
+  """
+  The current status of the pull request with respect to code review.
+  """
+  reviewDecision: PullRequestReviewDecision
+}
+
+"""
+Autogenerated input type of RevokeEnterpriseOrganizationsMigratorRole
+"""
+input RevokeEnterpriseOrganizationsMigratorRoleInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise whose managed organizations will have the migrator role revoked for the given user.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The login of the user from whom to revoke the migrator role
+  """
+  login: String!
+}
+
+"""
+Autogenerated return type of RevokeEnterpriseOrganizationsMigratorRole
+"""
+type RevokeEnterpriseOrganizationsMigratorRolePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The organizations that had the migrator role revoked for the given user.
+  """
+  organizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): OrganizationConnection
+}
+
+"""
+Autogenerated input type of RevokeMigratorRole
+"""
+input RevokeMigratorRoleInput {
+  """
+  The user login or Team slug to revoke the migrator role from.
+  """
+  actor: String!
+
+  """
+  Specifies the type of the actor, which can be either USER or TEAM.
+  """
+  actorType: ActorType!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the organization that the user/team belongs to.
+  """
+  organizationId: ID! @possibleTypes(concreteTypes: ["Organization"])
+}
+
+"""
+Autogenerated return type of RevokeMigratorRole
+"""
+type RevokeMigratorRolePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Did the operation succeed?
+  """
+  success: Boolean
+}
+
+"""
+Possible roles a user may have in relation to an organization.
+"""
+enum RoleInOrganization {
+  """
+  A user who is a direct member of the organization.
+  """
+  DIRECT_MEMBER
+
+  """
+  A user with full administrative access to the organization.
+  """
+  OWNER
+
+  """
+  A user who is unaffiliated with the organization.
+  """
+  UNAFFILIATED
+}
+
+"""
+The level of enforcement for a rule or ruleset.
+"""
+enum RuleEnforcement {
+  """
+  Rules will be enforced
+  """
+  ACTIVE
+
+  """
+  Do not evaluate or enforce rules
+  """
+  DISABLED
+
+  """
+  Allow admins to test rules before enforcing them. Admins can view insights on
+  the Rule Insights page (`evaluate` is only available with GitHub Enterprise).
+  """
+  EVALUATE
+}
+
+"""
+Types which can be parameters for `RepositoryRule` objects.
+"""
+union RuleParameters =
+    BranchNamePatternParameters
+  | CommitAuthorEmailPatternParameters
+  | CommitMessagePatternParameters
+  | CommitterEmailPatternParameters
+  | PullRequestParameters
+  | RequiredDeploymentsParameters
+  | RequiredStatusChecksParameters
+  | TagNamePatternParameters
+  | UpdateParameters
+  | WorkflowsParameters
+
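+# Reading parameters back requires an inline fragment per member of this
+# union; a minimal sketch against the `rules` connection on RepositoryRuleset
+# above (the PullRequestParameters field shown is assumed from GitHub's
+# published schema and does not appear in this excerpt):
+#
+#   rules(first: 10) {
+#     nodes {
+#       type
+#       parameters {
+#         ... on PullRequestParameters { requiredApprovingReviewCount }
+#       }
+#     }
+#   }
+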
+"""
+Specifies the parameters for a `RepositoryRule` object. Only one of the fields should be specified.
+"""
+input RuleParametersInput {
+  """
+  Parameters used for the `branch_name_pattern` rule type
+  """
+  branchNamePattern: BranchNamePatternParametersInput
+
+  """
+  Parameters used for the `commit_author_email_pattern` rule type
+  """
+  commitAuthorEmailPattern: CommitAuthorEmailPatternParametersInput
+
+  """
+  Parameters used for the `commit_message_pattern` rule type
+  """
+  commitMessagePattern: CommitMessagePatternParametersInput
+
+  """
+  Parameters used for the `committer_email_pattern` rule type
+  """
+  committerEmailPattern: CommitterEmailPatternParametersInput
+
+  """
+  Parameters used for the `pull_request` rule type
+  """
+  pullRequest: PullRequestParametersInput
+
+  """
+  Parameters used for the `required_deployments` rule type
+  """
+  requiredDeployments: RequiredDeploymentsParametersInput
+
+  """
+  Parameters used for the `required_status_checks` rule type
+  """
+  requiredStatusChecks: RequiredStatusChecksParametersInput
+
+  """
+  Parameters used for the `tag_name_pattern` rule type
+  """
+  tagNamePattern: TagNamePatternParametersInput
+
+  """
+  Parameters used for the `update` rule type
+  """
+  update: UpdateParametersInput
+
+  """
+  Parameters used for the `workflows` rule type
+  """
+  workflows: WorkflowsParametersInput
+}
+
+"""
+Types which can have `RepositoryRule` objects.
+"""
+union RuleSource = Organization | Repository
+
+"""
+The possible digest algorithms used to sign SAML requests for an identity provider.
+"""
+enum SamlDigestAlgorithm {
+  """
+  SHA1
+  """
+  SHA1
+
+  """
+  SHA256
+  """
+  SHA256
+
+  """
+  SHA384
+  """
+  SHA384
+
+  """
+  SHA512
+  """
+  SHA512
+}
+
+"""
+The possible signature algorithms used to sign SAML requests for an identity provider.
+"""
+enum SamlSignatureAlgorithm {
+  """
+  RSA-SHA1
+  """
+  RSA_SHA1
+
+  """
+  RSA-SHA256
+  """
+  RSA_SHA256
+
+  """
+  RSA-SHA384
+  """
+  RSA_SHA384
+
+  """
+  RSA-SHA512
+  """
+  RSA_SHA512
+}
+
+"""
+A Saved Reply is text a user can use to reply quickly.
+"""
+type SavedReply implements Node {
+  """
+  The body of the saved reply.
+  """
+  body: String!
+
+  """
+  The saved reply body rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the SavedReply object
+  """
+  id: ID!
+
+  """
+  The title of the saved reply.
+  """
+  title: String!
+
+  """
+  The user that saved this reply.
+  """
+  user: Actor
+}
+
+"""
+The connection type for SavedReply.
+"""
+type SavedReplyConnection {
+  """
+  A list of edges.
+  """
+  edges: [SavedReplyEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [SavedReply]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type SavedReplyEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: SavedReply
+}
+
+"""
+Ordering options for saved reply connections.
+"""
+input SavedReplyOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order saved replies by.
+  """
+  field: SavedReplyOrderField!
+}
+
+"""
+Properties by which saved reply connections can be ordered.
+"""
+enum SavedReplyOrderField {
+  """
+  Order saved replies by when they were updated.
+  """
+  UPDATED_AT
+}
+
+"""
+The results of a search.
+"""
+union SearchResultItem = App | Discussion | Issue | MarketplaceListing | Organization | PullRequest | Repository | User
+
+"""
+A list of results that matched against a search query. Regardless of the number
+of matches, a maximum of 1,000 results will be available across all types,
+potentially split across many pages.
+"""
+type SearchResultItemConnection {
+  """
+  The total number of pieces of code that matched the search query. Regardless
+  of the total number of matches, a maximum of 1,000 results will be available
+  across all types.
+  """
+  codeCount: Int!
+
+  """
+  The total number of discussions that matched the search query. Regardless of
+  the total number of matches, a maximum of 1,000 results will be available
+  across all types.
+  """
+  discussionCount: Int!
+
+  """
+  A list of edges.
+  """
+  edges: [SearchResultItemEdge]
+
+  """
+  The total number of issues that matched the search query. Regardless of the
+  total number of matches, a maximum of 1,000 results will be available across all types.
+  """
+  issueCount: Int!
+
+  """
+  A list of nodes.
+  """
+  nodes: [SearchResultItem]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  The total number of repositories that matched the search query. Regardless of
+  the total number of matches, a maximum of 1,000 results will be available
+  across all types.
+  """
+  repositoryCount: Int!
+
+  """
+  The total number of users that matched the search query. Regardless of the
+  total number of matches, a maximum of 1,000 results will be available across all types.
+  """
+  userCount: Int!
+
+  """
+  The total number of wiki pages that matched the search query. Regardless of
+  the total number of matches, a maximum of 1,000 results will be available
+  across all types.
+  """
+  wikiCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type SearchResultItemEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: SearchResultItem
+
+  """
+  Text matches on the result found.
+  """
+  textMatches: [TextMatch]
+}
+
+"""
+Represents the individual results of a search.
+"""
+enum SearchType {
+  """
+  Returns matching discussions in repositories.
+  """
+  DISCUSSION
+
+  """
+  Returns results matching issues in repositories.
+  """
+  ISSUE
+
+  """
+  Returns results matching repositories.
+  """
+  REPOSITORY
+
+  """
+  Returns results matching users and organizations on GitHub.
+  """
+  USER
+}
+
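+# Illustrative search over the SearchResultItem union, assuming a root
+# `search(query:, type:, first:)` field as in GitHub's public API:
+#
+#   query {
+#     search(query: "language:python stars:>1000", type: REPOSITORY, first: 5) {
+#       repositoryCount
+#       nodes { ... on Repository { nameWithOwner } }
+#     }
+#   }
+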
+"""
+A GitHub Security Advisory
+"""
+type SecurityAdvisory implements Node {
+  """
+  The classification of the advisory
+  """
+  classification: SecurityAdvisoryClassification!
+
+  """
+  The CVSS associated with this advisory
+  """
+  cvss: CVSS!
+
+  """
+  CWEs associated with this Advisory
+  """
+  cwes(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): CWEConnection!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  This is a long plaintext description of the advisory
+  """
+  description: String!
+
+  """
+  The GitHub Security Advisory ID
+  """
+  ghsaId: String!
+
+  """
+  The Node ID of the SecurityAdvisory object
+  """
+  id: ID!
+
+  """
+  A list of identifiers for this advisory
+  """
+  identifiers: [SecurityAdvisoryIdentifier!]!
+
+  """
+  The permalink for the advisory's dependabot alerts page
+  """
+  notificationsPermalink: URI
+
+  """
+  The organization that originated the advisory
+  """
+  origin: String!
+
+  """
+  The permalink for the advisory
+  """
+  permalink: URI
+
+  """
+  When the advisory was published
+  """
+  publishedAt: DateTime!
+
+  """
+  A list of references for this advisory
+  """
+  references: [SecurityAdvisoryReference!]!
+
+  """
+  The severity of the advisory
+  """
+  severity: SecurityAdvisorySeverity!
+
+  """
+  A short plaintext summary of the advisory
+  """
+  summary: String!
+
+  """
+  When the advisory was last updated
+  """
+  updatedAt: DateTime!
+
+  """
+  Vulnerabilities associated with this Advisory
+  """
+  vulnerabilities(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    A list of advisory classifications to filter vulnerabilities by.
+    """
+    classifications: [SecurityAdvisoryClassification!]
+
+    """
+    An ecosystem to filter vulnerabilities by.
+    """
+    ecosystem: SecurityAdvisoryEcosystem
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the returned topics.
+    """
+    orderBy: SecurityVulnerabilityOrder = {field: UPDATED_AT, direction: DESC}
+
+    """
+    A package name to filter vulnerabilities by.
+    """
+    package: String
+
+    """
+    A list of severities to filter vulnerabilities by.
+    """
+    severities: [SecurityAdvisorySeverity!]
+  ): SecurityVulnerabilityConnection!
+
+  """
+  When the advisory was withdrawn, if it has been withdrawn
+  """
+  withdrawnAt: DateTime
+}
+
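+# Illustrative advisory lookup, assuming a root `securityAdvisory(ghsaId:)`
+# field as in GitHub's public API (the GHSA ID is a placeholder):
+#
+#   query {
+#     securityAdvisory(ghsaId: "GHSA-xxxx-xxxx-xxxx") {
+#       summary
+#       severity
+#       vulnerabilities(first: 5, ecosystem: PIP) {
+#         nodes { package { name } vulnerableVersionRange }
+#       }
+#     }
+#   }
+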
+"""
+Classification of the advisory.
+"""
+enum SecurityAdvisoryClassification {
+  """
+  Classification of general advisories.
+  """
+  GENERAL
+
+  """
+  Classification of malware advisories.
+  """
+  MALWARE
+}
+
+"""
+The connection type for SecurityAdvisory.
+"""
+type SecurityAdvisoryConnection {
+  """
+  A list of edges.
+  """
+  edges: [SecurityAdvisoryEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [SecurityAdvisory]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+The possible ecosystems of a security vulnerability's package.
+"""
+enum SecurityAdvisoryEcosystem {
+  """
+  GitHub Actions
+  """
+  ACTIONS
+
+  """
+  PHP packages hosted at packagist.org
+  """
+  COMPOSER
+
+  """
+  Erlang/Elixir packages hosted at hex.pm
+  """
+  ERLANG
+
+  """
+  Go modules
+  """
+  GO
+
+  """
+  Java artifacts hosted at the Maven central repository
+  """
+  MAVEN
+
+  """
+  JavaScript packages hosted at npmjs.com
+  """
+  NPM
+
+  """
+  .NET packages hosted at the NuGet Gallery
+  """
+  NUGET
+
+  """
+  Python packages hosted at PyPI.org
+  """
+  PIP
+
+  """
+  Dart packages hosted at pub.dev
+  """
+  PUB
+
+  """
+  Ruby gems hosted at RubyGems.org
+  """
+  RUBYGEMS
+
+  """
+  Rust crates
+  """
+  RUST
+
+  """
+  Swift packages
+  """
+  SWIFT
+}
+
+"""
+An edge in a connection.
+"""
+type SecurityAdvisoryEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: SecurityAdvisory
+}
+
+"""
+A GitHub Security Advisory Identifier
+"""
+type SecurityAdvisoryIdentifier {
+  """
+  The identifier type, e.g. GHSA, CVE
+  """
+  type: String!
+
+  """
+  The identifier
+  """
+  value: String!
+}
+
+"""
+An advisory identifier to filter results on.
+"""
+input SecurityAdvisoryIdentifierFilter {
+  """
+  The identifier type.
+  """
+  type: SecurityAdvisoryIdentifierType!
+
+  """
+  The identifier string. Supports exact or partial matching.
+  """
+  value: String!
+}
+
+"""
+Identifier formats available for advisories.
+"""
+enum SecurityAdvisoryIdentifierType {
+  """
+  Common Vulnerabilities and Exposures Identifier.
+  """
+  CVE
+
+  """
+  GitHub Security Advisory ID.
+  """
+  GHSA
+}
+
+"""
+Ordering options for security advisory connections
+"""
+input SecurityAdvisoryOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order security advisories by.
+  """
+  field: SecurityAdvisoryOrderField!
+}
+
+"""
+Properties by which security advisory connections can be ordered.
+"""
+enum SecurityAdvisoryOrderField {
+  """
+  Order advisories by publication time
+  """
+  PUBLISHED_AT
+
+  """
+  Order advisories by update time
+  """
+  UPDATED_AT
+}
+
+"""
+An individual package
+"""
+type SecurityAdvisoryPackage {
+  """
+  The ecosystem the package belongs to, e.g. RUBYGEMS, NPM
+  """
+  ecosystem: SecurityAdvisoryEcosystem!
+
+  """
+  The package name
+  """
+  name: String!
+}
+
+"""
+An individual package version
+"""
+type SecurityAdvisoryPackageVersion {
+  """
+  The package name or version
+  """
+  identifier: String!
+}
+
+"""
+A GitHub Security Advisory Reference
+"""
+type SecurityAdvisoryReference {
+  """
+  A publicly accessible reference
+  """
+  url: URI!
+}
+
+"""
+Severity of the vulnerability.
+"""
+enum SecurityAdvisorySeverity {
+  """
+  Critical.
+  """
+  CRITICAL
+
+  """
+  High.
+  """
+  HIGH
+
+  """
+  Low.
+  """
+  LOW
+
+  """
+  Moderate.
+  """
+  MODERATE
+}
+
+"""
+An individual vulnerability within an Advisory
+"""
+type SecurityVulnerability {
+  """
+  The Advisory associated with this Vulnerability
+  """
+  advisory: SecurityAdvisory!
+
+  """
+  The first version containing a fix for the vulnerability
+  """
+  firstPatchedVersion: SecurityAdvisoryPackageVersion
+
+  """
+  A description of the vulnerable package
+  """
+  package: SecurityAdvisoryPackage!
+
+  """
+  The severity of the vulnerability within this package
+  """
+  severity: SecurityAdvisorySeverity!
+
+  """
+  When the vulnerability was last updated
+  """
+  updatedAt: DateTime!
+
+  """
+  A string that describes the vulnerable package versions.
+  This string follows a basic syntax with a few forms.
+  + `= 0.2.0` denotes a single vulnerable version.
+  + `<= 1.0.8` denotes a version range up to and including the specified version
+  + `< 0.1.11` denotes a version range up to, but excluding, the specified version
+  + `>= 4.3.0, < 4.3.5` denotes a version range with a known minimum and maximum version.
+  + `>= 0.0.1` denotes a version range with a known minimum, but no known maximum
+  """
+  vulnerableVersionRange: String!
+}
+
+"""
+The connection type for SecurityVulnerability.
+"""
+type SecurityVulnerabilityConnection {
+  """
+  A list of edges.
+  """
+  edges: [SecurityVulnerabilityEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [SecurityVulnerability]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type SecurityVulnerabilityEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: SecurityVulnerability
+}
+
+"""
+Ordering options for security vulnerability connections
+"""
+input SecurityVulnerabilityOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order security vulnerabilities by.
+  """
+  field: SecurityVulnerabilityOrderField!
+}
+
+"""
+Properties by which security vulnerability connections can be ordered.
+"""
+enum SecurityVulnerabilityOrderField {
+  """
+  Order vulnerabilities by update time
+  """
+  UPDATED_AT
+}
+
+"""
+Autogenerated input type of SetEnterpriseIdentityProvider
+"""
+input SetEnterpriseIdentityProviderInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The digest algorithm used to sign SAML requests for the identity provider.
+  """
+  digestMethod: SamlDigestAlgorithm!
+
+  """
+  The ID of the enterprise on which to set an identity provider.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The x509 certificate used by the identity provider to sign assertions and responses.
+  """
+  idpCertificate: String!
+
+  """
+  The Issuer Entity ID for the SAML identity provider
+  """
+  issuer: String
+
+  """
+  The signature algorithm used to sign SAML requests for the identity provider.
+  """
+  signatureMethod: SamlSignatureAlgorithm!
+
+  """
+  The URL endpoint for the identity provider's SAML SSO.
+  """
+  ssoUrl: URI!
+}
+
+"""
+Autogenerated return type of SetEnterpriseIdentityProvider
+"""
+type SetEnterpriseIdentityProviderPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The identity provider for the enterprise.
+  """
+  identityProvider: EnterpriseIdentityProvider
+}
+
+"""
+Autogenerated input type of SetOrganizationInteractionLimit
+"""
+input SetOrganizationInteractionLimitInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  When this limit should expire.
+  """
+  expiry: RepositoryInteractionLimitExpiry
+
+  """
+  The limit to set.
+  """
+  limit: RepositoryInteractionLimit!
+
+  """
+  The ID of the organization to set a limit for.
+  """
+  organizationId: ID! @possibleTypes(concreteTypes: ["Organization"])
+}
+
+"""
+Autogenerated return type of SetOrganizationInteractionLimit
+"""
+type SetOrganizationInteractionLimitPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The organization that the interaction limit was set for.
+  """
+  organization: Organization
+}
+
+"""
+Autogenerated input type of SetRepositoryInteractionLimit
+"""
+input SetRepositoryInteractionLimitInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  When this limit should expire.
+  """
+  expiry: RepositoryInteractionLimitExpiry
+
+  """
+  The limit to set.
+  """
+  limit: RepositoryInteractionLimit!
+
+  """
+  The ID of the repository to set a limit for.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of SetRepositoryInteractionLimit
+"""
+type SetRepositoryInteractionLimitPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The repository that the interaction limit was set for.
+  """
+  repository: Repository
+}
+
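+# A sketch of the corresponding mutation. The `setRepositoryInteractionLimit`
+# field name is inferred from the autogenerated input/payload names above, and
+# the enum values come from RepositoryInteractionLimit and
+# RepositoryInteractionLimitExpiry, defined elsewhere in this file:
+#
+#   mutation {
+#     setRepositoryInteractionLimit(input: {
+#       repositoryId: "R_kgDOA..."
+#       limit: CONTRIBUTORS_ONLY
+#       expiry: ONE_WEEK
+#     }) {
+#       repository { name }
+#     }
+#   }
+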
+"""
+Autogenerated input type of SetUserInteractionLimit
+"""
+input SetUserInteractionLimitInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  When this limit should expire.
+  """
+  expiry: RepositoryInteractionLimitExpiry
+
+  """
+  The limit to set.
+  """
+  limit: RepositoryInteractionLimit!
+
+  """
+  The ID of the user to set a limit for.
+  """
+  userId: ID! @possibleTypes(concreteTypes: ["User"])
+}
+
+"""
+Autogenerated return type of SetUserInteractionLimit
+"""
+type SetUserInteractionLimitPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The user that the interaction limit was set for.
+  """
+  user: User
+}
+
+"""
+Represents an S/MIME signature on a Commit or Tag.
+"""
+type SmimeSignature implements GitSignature {
+  """
+  Email used to sign this object.
+  """
+  email: String!
+
+  """
+  True if the signature is valid and verified by GitHub.
+  """
+  isValid: Boolean!
+
+  """
+  Payload for GPG signing object. Raw ODB object without the signature header.
+  """
+  payload: String!
+
+  """
+  ASCII-armored signature header from object.
+  """
+  signature: String!
+
+  """
+  GitHub user corresponding to the email signing this commit.
+  """
+  signer: User
+
+  """
+  The state of this signature. `VALID` if the signature is valid and verified by
+  GitHub; otherwise, the reason the signature is considered invalid.
+  """
+  state: GitSignatureState!
+
+  """
+  True if the signature was made with GitHub's signing key.
+  """
+  wasSignedByGitHub: Boolean!
+}
+
+"""
+Social media profile associated with a user.
+"""
+type SocialAccount {
+  """
+  Name of the social media account as it appears on the profile.
+  """
+  displayName: String!
+
+  """
+  Software or company that hosts the social media account.
+  """
+  provider: SocialAccountProvider!
+
+  """
+  URL of the social media account.
+  """
+  url: URI!
+}
+
+"""
+The connection type for SocialAccount.
+"""
+type SocialAccountConnection {
+  """
+  A list of edges.
+  """
+  edges: [SocialAccountEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [SocialAccount]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type SocialAccountEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: SocialAccount
+}
+
+"""
+Software or company that hosts social media accounts.
+"""
+enum SocialAccountProvider {
+  """
+  Social media and networking website.
+  """
+  FACEBOOK
+
+  """
+  Catch-all for social media providers that do not yet have specific handling.
+  """
+  GENERIC
+
+  """
+  Fork of Mastodon with a greater focus on local posting.
+  """
+  HOMETOWN
+
+  """
+  Social media website with a focus on photo and video sharing.
+  """
+  INSTAGRAM
+
+  """
+  Professional networking website.
+  """
+  LINKEDIN
+
+  """
+  Open-source federated microblogging service.
+  """
+  MASTODON
+
+  """
+  JavaScript package registry.
+  """
+  NPM
+
+  """
+  Social news aggregation and discussion website.
+  """
+  REDDIT
+
+  """
+  Live-streaming service.
+  """
+  TWITCH
+
+  """
+  Microblogging website.
+  """
+  TWITTER
+
+  """
+  Online video platform.
+  """
+  YOUTUBE
+}
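+
+# A minimal usage sketch (not part of the upstream schema), assuming a root
+# `user(login:)` query field and a `socialAccounts` connection on User that
+# returns the SocialAccount type defined above:
+#
+#   query {
+#     user(login: "octocat") {
+#       socialAccounts(first: 5) {
+#         nodes { provider displayName url }
+#       }
+#     }
+#   }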
+
+"""
+Entities that can sponsor others via GitHub Sponsors.
+"""
+union Sponsor = Organization | User
+
+"""
+The connection type for Sponsor.
+"""
+type SponsorConnection {
+  """
+  A list of edges.
+  """
+  edges: [SponsorEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Sponsor]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+Represents a user or organization who is sponsoring someone in GitHub Sponsors.
+"""
+type SponsorEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Sponsor
+}
+
+"""
+Ordering options for connections to get sponsor entities for GitHub Sponsors.
+"""
+input SponsorOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order sponsor entities by.
+  """
+  field: SponsorOrderField!
+}
+
+"""
+Properties by which sponsor connections can be ordered.
+"""
+enum SponsorOrderField {
+  """
+  Order sponsor entities by login (username).
+  """
+  LOGIN
+
+  """
+  Order sponsors by their relevance to the viewer.
+  """
+  RELEVANCE
+}
+
+"""
+Entities that can sponsor or be sponsored through GitHub Sponsors.
+"""
+interface Sponsorable {
+  """
+  The estimated next GitHub Sponsors payout for this user/organization in cents (USD).
+  """
+  estimatedNextSponsorsPayoutInCents: Int!
+
+  """
+  True if this user/organization has a GitHub Sponsors listing.
+  """
+  hasSponsorsListing: Boolean!
+
+  """
+  Whether the given account is sponsoring this user/organization.
+  """
+  isSponsoredBy(
+    """
+    The target account's login.
+    """
+    accountLogin: String!
+  ): Boolean!
+
+  """
+  True if the viewer is sponsored by this user/organization.
+  """
+  isSponsoringViewer: Boolean!
+
+  """
+  The estimated monthly GitHub Sponsors income for this user/organization in cents (USD).
+  """
+  monthlyEstimatedSponsorsIncomeInCents: Int!
+
+  """
+  List of users and organizations this entity is sponsoring.
+  """
+  sponsoring(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the users and organizations returned from the connection.
+    """
+    orderBy: SponsorOrder = {field: RELEVANCE, direction: DESC}
+  ): SponsorConnection!
+
+  """
+  List of sponsors for this user or organization.
+  """
+  sponsors(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for sponsors returned from the connection.
+    """
+    orderBy: SponsorOrder = {field: RELEVANCE, direction: DESC}
+
+    """
+    If given, will filter for sponsors at the given tier. Will only return
+    sponsors whose tier the viewer is permitted to see.
+    """
+    tierId: ID
+  ): SponsorConnection!
+
+  """
+  Events involving this sponsorable, such as new sponsorships.
+  """
+  sponsorsActivities(
+    """
+    Filter activities to only the specified actions.
+    """
+    actions: [SponsorsActivityAction!] = []
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Whether to include those events where this sponsorable acted as the sponsor.
+    Defaults to only including events where this sponsorable was the recipient
+    of a sponsorship.
+    """
+    includeAsSponsor: Boolean = false
+
+    """
+    Whether or not to include private activities in the result set. Defaults to including public and private activities.
+    """
+    includePrivate: Boolean = true
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for activity returned from the connection.
+    """
+    orderBy: SponsorsActivityOrder = {field: TIMESTAMP, direction: DESC}
+
+    """
+    Filter activities returned to only those that occurred in the most recent
+    specified time period. Set to ALL to avoid filtering by when the activity
+    occurred. Will be ignored if `since` or `until` is given.
+    """
+    period: SponsorsActivityPeriod = MONTH
+
+    """
+    Filter activities to those that occurred on or after this time.
+    """
+    since: DateTime
+
+    """
+    Filter activities to those that occurred before this time.
+    """
+    until: DateTime
+  ): SponsorsActivityConnection!
+
+  """
+  The GitHub Sponsors listing for this user or organization.
+  """
+  sponsorsListing: SponsorsListing
+
+  """
+  The sponsorship from the viewer to this user/organization; that is, the sponsorship where you're the sponsor.
+  """
+  sponsorshipForViewerAsSponsor(
+    """
+    Whether to return the sponsorship only if it's still active. Pass false to
+    get the viewer's sponsorship back even if it has been cancelled.
+    """
+    activeOnly: Boolean = true
+  ): Sponsorship
+
+  """
+  The sponsorship from this user/organization to the viewer; that is, the sponsorship you're receiving.
+  """
+  sponsorshipForViewerAsSponsorable(
+    """
+    Whether to return the sponsorship only if it's still active. Pass false to
+    get the sponsorship back even if it has been cancelled.
+    """
+    activeOnly: Boolean = true
+  ): Sponsorship
+
+  """
+  List of sponsorship updates sent from this sponsorable to sponsors.
+  """
+  sponsorshipNewsletters(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for sponsorship updates returned from the connection.
+    """
+    orderBy: SponsorshipNewsletterOrder = {field: CREATED_AT, direction: DESC}
+  ): SponsorshipNewsletterConnection!
+
+  """
+  The sponsorships where this user or organization is the maintainer receiving the funds.
+  """
+  sponsorshipsAsMaintainer(
+    """
+    Whether to include only sponsorships that are active right now, versus all
+    sponsorships this maintainer has ever received.
+    """
+    activeOnly: Boolean = true
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Whether or not to include private sponsorships in the result set.
+    """
+    includePrivate: Boolean = false
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for sponsorships returned from this connection. If left
+    blank, the sponsorships will be ordered based on relevancy to the viewer.
+    """
+    orderBy: SponsorshipOrder
+  ): SponsorshipConnection!
+
+  """
+  The sponsorships where this user or organization is the funder.
+  """
+  sponsorshipsAsSponsor(
+    """
+    Whether to include only sponsorships that are active right now, versus all sponsorships this sponsor has ever made.
+    """
+    activeOnly: Boolean = true
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter sponsorships returned to those for the specified maintainers. That
+    is, the recipient of the sponsorship is a user or organization with one of
+    the given logins.
+    """
+    maintainerLogins: [String!]
+
+    """
+    Ordering options for sponsorships returned from this connection. If left
+    blank, the sponsorships will be ordered based on relevancy to the viewer.
+    """
+    orderBy: SponsorshipOrder
+  ): SponsorshipConnection!
+
+  """
+  The amount in United States cents (e.g., 500 = $5.00 USD) that this entity has
+  spent on GitHub to fund sponsorships. Only returns a value when viewed by the
+  user themselves or by a user who can manage sponsorships for the requested organization.
+  """
+  totalSponsorshipAmountAsSponsorInCents(
+    """
+    Filter payments to those that occurred on or after this time.
+    """
+    since: DateTime
+
+    """
+    Filter payments to those made to the users or organizations with the specified usernames.
+    """
+    sponsorableLogins: [String!] = []
+
+    """
+    Filter payments to those that occurred before this time.
+    """
+    until: DateTime
+  ): Int
+
+  """
+  Whether or not the viewer is able to sponsor this user/organization.
+  """
+  viewerCanSponsor: Boolean!
+
+  """
+  True if the viewer is sponsoring this user/organization.
+  """
+  viewerIsSponsoring: Boolean!
+}
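+
+# A minimal usage sketch (not part of the upstream schema), assuming a root
+# `user(login:)` query field whose result implements Sponsorable; it pages
+# through sponsors using the connection fields defined above:
+#
+#   query {
+#     user(login: "octocat") {
+#       sponsors(first: 10, orderBy: {field: RELEVANCE, direction: DESC}) {
+#         totalCount
+#         nodes {
+#           ... on User { login }
+#           ... on Organization { login }
+#         }
+#         pageInfo { hasNextPage endCursor }
+#       }
+#     }
+#   }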
+
+"""
+Entities that can be sponsored via GitHub Sponsors.
+"""
+union SponsorableItem = Organization | User
+
+"""
+The connection type for SponsorableItem.
+"""
+type SponsorableItemConnection {
+  """
+  A list of edges.
+  """
+  edges: [SponsorableItemEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [SponsorableItem]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type SponsorableItemEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: SponsorableItem
+}
+
+"""
+Ordering options for connections to get sponsorable entities for GitHub Sponsors.
+"""
+input SponsorableOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order sponsorable entities by.
+  """
+  field: SponsorableOrderField!
+}
+
+"""
+Properties by which sponsorable connections can be ordered.
+"""
+enum SponsorableOrderField {
+  """
+  Order sponsorable entities by login (username).
+  """
+  LOGIN
+}
+
+"""
+An event related to sponsorship activity.
+"""
+type SponsorsActivity implements Node {
+  """
+  What action this activity indicates took place.
+  """
+  action: SponsorsActivityAction!
+
+  """
+  The sponsor's current privacy level.
+  """
+  currentPrivacyLevel: SponsorshipPrivacy
+
+  """
+  The Node ID of the SponsorsActivity object
+  """
+  id: ID!
+
+  """
+  The platform that was used to pay for the sponsorship.
+  """
+  paymentSource: SponsorshipPaymentSource
+
+  """
+  The tier that the sponsorship previously used, for tier change events.
+  """
+  previousSponsorsTier: SponsorsTier
+
+  """
+  The user or organization who triggered this activity and was/is sponsoring the sponsorable.
+  """
+  sponsor: Sponsor
+
+  """
+  The user or organization that is being sponsored, the maintainer.
+  """
+  sponsorable: Sponsorable!
+
+  """
+  The associated sponsorship tier.
+  """
+  sponsorsTier: SponsorsTier
+
+  """
+  The timestamp of this event.
+  """
+  timestamp: DateTime
+
+  """
+  Was this sponsorship made alongside other sponsorships at the same time from the same sponsor?
+  """
+  viaBulkSponsorship: Boolean!
+}
+
+"""
+The possible actions that GitHub Sponsors activities can represent.
+"""
+enum SponsorsActivityAction {
+  """
+  The activity was cancelling a sponsorship.
+  """
+  CANCELLED_SPONSORSHIP
+
+  """
+  The activity was starting a sponsorship.
+  """
+  NEW_SPONSORSHIP
+
+  """
+  The activity was scheduling a downgrade or cancellation.
+  """
+  PENDING_CHANGE
+
+  """
+  The activity was a refund of funds to the sponsor or GitHub.
+  """
+  REFUND
+
+  """
+  The activity was disabling matching for a previously matched sponsorship.
+  """
+  SPONSOR_MATCH_DISABLED
+
+  """
+  The activity was changing the sponsorship tier, either directly by the sponsor or by a scheduled/pending change.
+  """
+  TIER_CHANGE
+}
+
+"""
+The connection type for SponsorsActivity.
+"""
+type SponsorsActivityConnection {
+  """
+  A list of edges.
+  """
+  edges: [SponsorsActivityEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [SponsorsActivity]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type SponsorsActivityEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: SponsorsActivity
+}
+
+"""
+Ordering options for GitHub Sponsors activity connections.
+"""
+input SponsorsActivityOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order activity by.
+  """
+  field: SponsorsActivityOrderField!
+}
+
+"""
+Properties by which GitHub Sponsors activity connections can be ordered.
+"""
+enum SponsorsActivityOrderField {
+  """
+  Order activities by when they happened.
+  """
+  TIMESTAMP
+}
+
+"""
+The possible time periods for which Sponsors activities can be requested.
+"""
+enum SponsorsActivityPeriod {
+  """
+  Don't restrict the activity to any date range; include all activity.
+  """
+  ALL
+
+  """
+  The previous calendar day.
+  """
+  DAY
+
+  """
+  The previous thirty days.
+  """
+  MONTH
+
+  """
+  The previous seven days.
+  """
+  WEEK
+}
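+
+# A sketch of how this period filter combines with `sponsorsActivities` on the
+# Sponsorable interface above (not part of the upstream schema; the root
+# `user(login:)` field is assumed):
+#
+#   query {
+#     user(login: "octocat") {
+#       sponsorsActivities(first: 20, period: WEEK,
+#                          actions: [NEW_SPONSORSHIP, CANCELLED_SPONSORSHIP]) {
+#         nodes { action timestamp }
+#       }
+#     }
+#   }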
+
+"""
+Represents countries or regions for billing and residence for a GitHub Sponsors profile.
+"""
+enum SponsorsCountryOrRegionCode {
+  """
+  Andorra
+  """
+  AD
+
+  """
+  United Arab Emirates
+  """
+  AE
+
+  """
+  Afghanistan
+  """
+  AF
+
+  """
+  Antigua and Barbuda
+  """
+  AG
+
+  """
+  Anguilla
+  """
+  AI
+
+  """
+  Albania
+  """
+  AL
+
+  """
+  Armenia
+  """
+  AM
+
+  """
+  Angola
+  """
+  AO
+
+  """
+  Antarctica
+  """
+  AQ
+
+  """
+  Argentina
+  """
+  AR
+
+  """
+  American Samoa
+  """
+  AS
+
+  """
+  Austria
+  """
+  AT
+
+  """
+  Australia
+  """
+  AU
+
+  """
+  Aruba
+  """
+  AW
+
+  """
+  Åland
+  """
+  AX
+
+  """
+  Azerbaijan
+  """
+  AZ
+
+  """
+  Bosnia and Herzegovina
+  """
+  BA
+
+  """
+  Barbados
+  """
+  BB
+
+  """
+  Bangladesh
+  """
+  BD
+
+  """
+  Belgium
+  """
+  BE
+
+  """
+  Burkina Faso
+  """
+  BF
+
+  """
+  Bulgaria
+  """
+  BG
+
+  """
+  Bahrain
+  """
+  BH
+
+  """
+  Burundi
+  """
+  BI
+
+  """
+  Benin
+  """
+  BJ
+
+  """
+  Saint Barthélemy
+  """
+  BL
+
+  """
+  Bermuda
+  """
+  BM
+
+  """
+  Brunei Darussalam
+  """
+  BN
+
+  """
+  Bolivia
+  """
+  BO
+
+  """
+  Bonaire, Sint Eustatius and Saba
+  """
+  BQ
+
+  """
+  Brazil
+  """
+  BR
+
+  """
+  Bahamas
+  """
+  BS
+
+  """
+  Bhutan
+  """
+  BT
+
+  """
+  Bouvet Island
+  """
+  BV
+
+  """
+  Botswana
+  """
+  BW
+
+  """
+  Belarus
+  """
+  BY
+
+  """
+  Belize
+  """
+  BZ
+
+  """
+  Canada
+  """
+  CA
+
+  """
+  Cocos (Keeling) Islands
+  """
+  CC
+
+  """
+  Congo (Kinshasa)
+  """
+  CD
+
+  """
+  Central African Republic
+  """
+  CF
+
+  """
+  Congo (Brazzaville)
+  """
+  CG
+
+  """
+  Switzerland
+  """
+  CH
+
+  """
+  Côte d'Ivoire
+  """
+  CI
+
+  """
+  Cook Islands
+  """
+  CK
+
+  """
+  Chile
+  """
+  CL
+
+  """
+  Cameroon
+  """
+  CM
+
+  """
+  China
+  """
+  CN
+
+  """
+  Colombia
+  """
+  CO
+
+  """
+  Costa Rica
+  """
+  CR
+
+  """
+  Cape Verde
+  """
+  CV
+
+  """
+  Curaçao
+  """
+  CW
+
+  """
+  Christmas Island
+  """
+  CX
+
+  """
+  Cyprus
+  """
+  CY
+
+  """
+  Czech Republic
+  """
+  CZ
+
+  """
+  Germany
+  """
+  DE
+
+  """
+  Djibouti
+  """
+  DJ
+
+  """
+  Denmark
+  """
+  DK
+
+  """
+  Dominica
+  """
+  DM
+
+  """
+  Dominican Republic
+  """
+  DO
+
+  """
+  Algeria
+  """
+  DZ
+
+  """
+  Ecuador
+  """
+  EC
+
+  """
+  Estonia
+  """
+  EE
+
+  """
+  Egypt
+  """
+  EG
+
+  """
+  Western Sahara
+  """
+  EH
+
+  """
+  Eritrea
+  """
+  ER
+
+  """
+  Spain
+  """
+  ES
+
+  """
+  Ethiopia
+  """
+  ET
+
+  """
+  Finland
+  """
+  FI
+
+  """
+  Fiji
+  """
+  FJ
+
+  """
+  Falkland Islands
+  """
+  FK
+
+  """
+  Micronesia
+  """
+  FM
+
+  """
+  Faroe Islands
+  """
+  FO
+
+  """
+  France
+  """
+  FR
+
+  """
+  Gabon
+  """
+  GA
+
+  """
+  United Kingdom
+  """
+  GB
+
+  """
+  Grenada
+  """
+  GD
+
+  """
+  Georgia
+  """
+  GE
+
+  """
+  French Guiana
+  """
+  GF
+
+  """
+  Guernsey
+  """
+  GG
+
+  """
+  Ghana
+  """
+  GH
+
+  """
+  Gibraltar
+  """
+  GI
+
+  """
+  Greenland
+  """
+  GL
+
+  """
+  Gambia
+  """
+  GM
+
+  """
+  Guinea
+  """
+  GN
+
+  """
+  Guadeloupe
+  """
+  GP
+
+  """
+  Equatorial Guinea
+  """
+  GQ
+
+  """
+  Greece
+  """
+  GR
+
+  """
+  South Georgia and South Sandwich Islands
+  """
+  GS
+
+  """
+  Guatemala
+  """
+  GT
+
+  """
+  Guam
+  """
+  GU
+
+  """
+  Guinea-Bissau
+  """
+  GW
+
+  """
+  Guyana
+  """
+  GY
+
+  """
+  Hong Kong
+  """
+  HK
+
+  """
+  Heard and McDonald Islands
+  """
+  HM
+
+  """
+  Honduras
+  """
+  HN
+
+  """
+  Croatia
+  """
+  HR
+
+  """
+  Haiti
+  """
+  HT
+
+  """
+  Hungary
+  """
+  HU
+
+  """
+  Indonesia
+  """
+  ID
+
+  """
+  Ireland
+  """
+  IE
+
+  """
+  Israel
+  """
+  IL
+
+  """
+  Isle of Man
+  """
+  IM
+
+  """
+  India
+  """
+  IN
+
+  """
+  British Indian Ocean Territory
+  """
+  IO
+
+  """
+  Iraq
+  """
+  IQ
+
+  """
+  Iran
+  """
+  IR
+
+  """
+  Iceland
+  """
+  IS
+
+  """
+  Italy
+  """
+  IT
+
+  """
+  Jersey
+  """
+  JE
+
+  """
+  Jamaica
+  """
+  JM
+
+  """
+  Jordan
+  """
+  JO
+
+  """
+  Japan
+  """
+  JP
+
+  """
+  Kenya
+  """
+  KE
+
+  """
+  Kyrgyzstan
+  """
+  KG
+
+  """
+  Cambodia
+  """
+  KH
+
+  """
+  Kiribati
+  """
+  KI
+
+  """
+  Comoros
+  """
+  KM
+
+  """
+  Saint Kitts and Nevis
+  """
+  KN
+
+  """
+  Korea, South
+  """
+  KR
+
+  """
+  Kuwait
+  """
+  KW
+
+  """
+  Cayman Islands
+  """
+  KY
+
+  """
+  Kazakhstan
+  """
+  KZ
+
+  """
+  Laos
+  """
+  LA
+
+  """
+  Lebanon
+  """
+  LB
+
+  """
+  Saint Lucia
+  """
+  LC
+
+  """
+  Liechtenstein
+  """
+  LI
+
+  """
+  Sri Lanka
+  """
+  LK
+
+  """
+  Liberia
+  """
+  LR
+
+  """
+  Lesotho
+  """
+  LS
+
+  """
+  Lithuania
+  """
+  LT
+
+  """
+  Luxembourg
+  """
+  LU
+
+  """
+  Latvia
+  """
+  LV
+
+  """
+  Libya
+  """
+  LY
+
+  """
+  Morocco
+  """
+  MA
+
+  """
+  Monaco
+  """
+  MC
+
+  """
+  Moldova
+  """
+  MD
+
+  """
+  Montenegro
+  """
+  ME
+
+  """
+  Saint Martin (French part)
+  """
+  MF
+
+  """
+  Madagascar
+  """
+  MG
+
+  """
+  Marshall Islands
+  """
+  MH
+
+  """
+  Macedonia
+  """
+  MK
+
+  """
+  Mali
+  """
+  ML
+
+  """
+  Myanmar
+  """
+  MM
+
+  """
+  Mongolia
+  """
+  MN
+
+  """
+  Macau
+  """
+  MO
+
+  """
+  Northern Mariana Islands
+  """
+  MP
+
+  """
+  Martinique
+  """
+  MQ
+
+  """
+  Mauritania
+  """
+  MR
+
+  """
+  Montserrat
+  """
+  MS
+
+  """
+  Malta
+  """
+  MT
+
+  """
+  Mauritius
+  """
+  MU
+
+  """
+  Maldives
+  """
+  MV
+
+  """
+  Malawi
+  """
+  MW
+
+  """
+  Mexico
+  """
+  MX
+
+  """
+  Malaysia
+  """
+  MY
+
+  """
+  Mozambique
+  """
+  MZ
+
+  """
+  Namibia
+  """
+  NA
+
+  """
+  New Caledonia
+  """
+  NC
+
+  """
+  Niger
+  """
+  NE
+
+  """
+  Norfolk Island
+  """
+  NF
+
+  """
+  Nigeria
+  """
+  NG
+
+  """
+  Nicaragua
+  """
+  NI
+
+  """
+  Netherlands
+  """
+  NL
+
+  """
+  Norway
+  """
+  NO
+
+  """
+  Nepal
+  """
+  NP
+
+  """
+  Nauru
+  """
+  NR
+
+  """
+  Niue
+  """
+  NU
+
+  """
+  New Zealand
+  """
+  NZ
+
+  """
+  Oman
+  """
+  OM
+
+  """
+  Panama
+  """
+  PA
+
+  """
+  Peru
+  """
+  PE
+
+  """
+  French Polynesia
+  """
+  PF
+
+  """
+  Papua New Guinea
+  """
+  PG
+
+  """
+  Philippines
+  """
+  PH
+
+  """
+  Pakistan
+  """
+  PK
+
+  """
+  Poland
+  """
+  PL
+
+  """
+  Saint Pierre and Miquelon
+  """
+  PM
+
+  """
+  Pitcairn
+  """
+  PN
+
+  """
+  Puerto Rico
+  """
+  PR
+
+  """
+  Palestine
+  """
+  PS
+
+  """
+  Portugal
+  """
+  PT
+
+  """
+  Palau
+  """
+  PW
+
+  """
+  Paraguay
+  """
+  PY
+
+  """
+  Qatar
+  """
+  QA
+
+  """
+  Reunion
+  """
+  RE
+
+  """
+  Romania
+  """
+  RO
+
+  """
+  Serbia
+  """
+  RS
+
+  """
+  Russian Federation
+  """
+  RU
+
+  """
+  Rwanda
+  """
+  RW
+
+  """
+  Saudi Arabia
+  """
+  SA
+
+  """
+  Solomon Islands
+  """
+  SB
+
+  """
+  Seychelles
+  """
+  SC
+
+  """
+  Sudan
+  """
+  SD
+
+  """
+  Sweden
+  """
+  SE
+
+  """
+  Singapore
+  """
+  SG
+
+  """
+  Saint Helena
+  """
+  SH
+
+  """
+  Slovenia
+  """
+  SI
+
+  """
+  Svalbard and Jan Mayen Islands
+  """
+  SJ
+
+  """
+  Slovakia
+  """
+  SK
+
+  """
+  Sierra Leone
+  """
+  SL
+
+  """
+  San Marino
+  """
+  SM
+
+  """
+  Senegal
+  """
+  SN
+
+  """
+  Somalia
+  """
+  SO
+
+  """
+  Suriname
+  """
+  SR
+
+  """
+  South Sudan
+  """
+  SS
+
+  """
+  Sao Tome and Principe
+  """
+  ST
+
+  """
+  El Salvador
+  """
+  SV
+
+  """
+  Sint Maarten (Dutch part)
+  """
+  SX
+
+  """
+  Swaziland
+  """
+  SZ
+
+  """
+  Turks and Caicos Islands
+  """
+  TC
+
+  """
+  Chad
+  """
+  TD
+
+  """
+  French Southern Lands
+  """
+  TF
+
+  """
+  Togo
+  """
+  TG
+
+  """
+  Thailand
+  """
+  TH
+
+  """
+  Tajikistan
+  """
+  TJ
+
+  """
+  Tokelau
+  """
+  TK
+
+  """
+  Timor-Leste
+  """
+  TL
+
+  """
+  Turkmenistan
+  """
+  TM
+
+  """
+  Tunisia
+  """
+  TN
+
+  """
+  Tonga
+  """
+  TO
+
+  """
+  Türkiye
+  """
+  TR
+
+  """
+  Trinidad and Tobago
+  """
+  TT
+
+  """
+  Tuvalu
+  """
+  TV
+
+  """
+  Taiwan
+  """
+  TW
+
+  """
+  Tanzania
+  """
+  TZ
+
+  """
+  Ukraine
+  """
+  UA
+
+  """
+  Uganda
+  """
+  UG
+
+  """
+  United States Minor Outlying Islands
+  """
+  UM
+
+  """
+  United States of America
+  """
+  US
+
+  """
+  Uruguay
+  """
+  UY
+
+  """
+  Uzbekistan
+  """
+  UZ
+
+  """
+  Vatican City
+  """
+  VA
+
+  """
+  Saint Vincent and the Grenadines
+  """
+  VC
+
+  """
+  Venezuela
+  """
+  VE
+
+  """
+  Virgin Islands, British
+  """
+  VG
+
+  """
+  Virgin Islands, U.S.
+  """
+  VI
+
+  """
+  Vietnam
+  """
+  VN
+
+  """
+  Vanuatu
+  """
+  VU
+
+  """
+  Wallis and Futuna Islands
+  """
+  WF
+
+  """
+  Samoa
+  """
+  WS
+
+  """
+  Yemen
+  """
+  YE
+
+  """
+  Mayotte
+  """
+  YT
+
+  """
+  South Africa
+  """
+  ZA
+
+  """
+  Zambia
+  """
+  ZM
+
+  """
+  Zimbabwe
+  """
+  ZW
+}
+
+"""
+A goal associated with a GitHub Sponsors listing, representing a target the sponsored maintainer would like to attain.
+"""
+type SponsorsGoal {
+  """
+  A description of the goal from the maintainer.
+  """
+  description: String
+
+  """
+  What the objective of this goal is.
+  """
+  kind: SponsorsGoalKind!
+
+  """
+  The percentage representing how complete this goal is, from 0 to 100.
+  """
+  percentComplete: Int!
+
+  """
+  What the goal amount is. Represents an amount in USD for monthly sponsorship
+  amount goals. Represents a count of unique sponsors for total sponsors count goals.
+  """
+  targetValue: Int!
+
+  """
+  A brief summary of the kind and target value of this goal.
+  """
+  title: String!
+}
+
+"""
+The different kinds of goals a GitHub Sponsors member can have.
+"""
+enum SponsorsGoalKind {
+  """
+  The goal is about getting a certain amount in USD from sponsorships each month.
+  """
+  MONTHLY_SPONSORSHIP_AMOUNT
+
+  """
+  The goal is about reaching a certain number of sponsors.
+  """
+  TOTAL_SPONSORS_COUNT
+}
+
+"""
+A GitHub Sponsors listing.
+"""
+type SponsorsListing implements Node {
+  """
+  The current goal the maintainer is trying to reach with GitHub Sponsors, if any.
+  """
+  activeGoal: SponsorsGoal
+
+  """
+  The Stripe Connect account currently in use for payouts for this Sponsors
+  listing, if any. Will only return a value when queried by the maintainer
+  themselves, or by an admin of the sponsorable organization.
+  """
+  activeStripeConnectAccount: StripeConnectAccount
+
+  """
+  The name of the country or region with the maintainer's bank account or fiscal
+  host. Will only return a value when queried by the maintainer themselves, or
+  by an admin of the sponsorable organization.
+  """
+  billingCountryOrRegion: String
+
+  """
+  The email address used by GitHub to contact the sponsorable about their GitHub
+  Sponsors profile. Will only return a value when queried by the maintainer
+  themselves, or by an admin of the sponsorable organization.
+  """
+  contactEmailAddress: String
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The HTTP path for the Sponsors dashboard for this Sponsors listing.
+  """
+  dashboardResourcePath: URI!
+
+  """
+  The HTTP URL for the Sponsors dashboard for this Sponsors listing.
+  """
+  dashboardUrl: URI!
+
+  """
+  The records featured on the GitHub Sponsors profile.
+  """
+  featuredItems(
+    """
+    The types of featured items to return.
+    """
+    featureableTypes: [SponsorsListingFeaturedItemFeatureableType!] = [REPOSITORY, USER]
+  ): [SponsorsListingFeaturedItem!]!
+
+  """
+  The fiscal host used for payments, if any. Will only return a value when
+  queried by the maintainer themselves, or by an admin of the sponsorable organization.
+  """
+  fiscalHost: Organization
+
+  """
+  The full description of the listing.
+  """
+  fullDescription: String!
+
+  """
+  The full description of the listing rendered to HTML.
+  """
+  fullDescriptionHTML: HTML!
+
+  """
+  The Node ID of the SponsorsListing object
+  """
+  id: ID!
+
+  """
+  Whether this listing is publicly visible.
+  """
+  isPublic: Boolean!
+
+  """
+  The listing's full name.
+  """
+  name: String!
+
+  """
+  A future date on which this listing is eligible to receive a payout.
+  """
+  nextPayoutDate: Date
+
+  """
+  The name of the country or region where the maintainer resides. Will only
+  return a value when queried by the maintainer themselves, or by an admin of
+  the sponsorable organization.
+  """
+  residenceCountryOrRegion: String
+
+  """
+  The HTTP path for this Sponsors listing.
+  """
+  resourcePath: URI!
+
+  """
+  The short description of the listing.
+  """
+  shortDescription: String!
+
+  """
+  The short name of the listing.
+  """
+  slug: String!
+
+  """
+  The entity this listing represents, which can be sponsored on GitHub Sponsors.
+  """
+  sponsorable: Sponsorable!
+
+  """
+  The tiers for this GitHub Sponsors profile.
+  """
+  tiers(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Whether to include tiers that aren't published. Only admins of the Sponsors
+    listing can see draft tiers. Only admins of the Sponsors listing and viewers
+    who are currently sponsoring on a retired tier can see those retired tiers.
+    Defaults to including only published tiers, which are visible to anyone who
+    can see the GitHub Sponsors profile.
+    """
+    includeUnpublished: Boolean = false
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for Sponsors tiers returned from the connection.
+    """
+    orderBy: SponsorsTierOrder = {field: MONTHLY_PRICE_IN_CENTS, direction: ASC}
+  ): SponsorsTierConnection
+
+  """
+  The HTTP URL for this Sponsors listing.
+  """
+  url: URI!
+}
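+
+# A minimal usage sketch (not part of the upstream schema), assuming a root
+# `organization(login:)` query field whose result implements Sponsorable and
+# exposes the `sponsorsListing` defined above; it lists published tiers from
+# cheapest to most expensive:
+#
+#   query {
+#     organization(login: "some-org") {
+#       sponsorsListing {
+#         name
+#         activeGoal { title percentComplete }
+#         tiers(first: 10, orderBy: {field: MONTHLY_PRICE_IN_CENTS, direction: ASC}) {
+#           nodes { name monthlyPriceInDollars isOneTime }
+#         }
+#       }
+#     }
+#   }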
+
+"""
+A record that can be featured on a GitHub Sponsors profile.
+"""
+union SponsorsListingFeatureableItem = Repository | User
+
+"""
+A record that is promoted on a GitHub Sponsors profile.
+"""
+type SponsorsListingFeaturedItem implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Will either be a description from the sponsorable maintainer about why they
+  featured this item, or the item's description itself, such as a user's bio
+  from their GitHub profile page.
+  """
+  description: String
+
+  """
+  The record that is featured on the GitHub Sponsors profile.
+  """
+  featureable: SponsorsListingFeatureableItem!
+
+  """
+  The Node ID of the SponsorsListingFeaturedItem object
+  """
+  id: ID!
+
+  """
+  The position of this featured item on the GitHub Sponsors profile with a lower
+  position indicating higher precedence. Starts at 1.
+  """
+  position: Int!
+
+  """
+  The GitHub Sponsors profile that features this record.
+  """
+  sponsorsListing: SponsorsListing!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The different kinds of records that can be featured on a GitHub Sponsors profile page.
+"""
+enum SponsorsListingFeaturedItemFeatureableType {
+  """
+  A repository owned by the user or organization with the GitHub Sponsors profile.
+  """
+  REPOSITORY
+
+  """
+  A user who belongs to the organization with the GitHub Sponsors profile.
+  """
+  USER
+}
+
+"""
+A GitHub Sponsors tier associated with a GitHub Sponsors listing.
+"""
+type SponsorsTier implements Node {
+  """
+  SponsorsTier information only visible to users that can administer the associated Sponsors listing.
+  """
+  adminInfo: SponsorsTierAdminInfo
+
+  """
+  Get a different tier for this tier's maintainer that is at the same frequency
+  as this tier but with an equal or lesser cost. Returns the published tier with
+  the monthly price closest to this tier's without going over.
+  """
+  closestLesserValueTier: SponsorsTier
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The description of the tier.
+  """
+  description: String!
+
+  """
+  The tier description rendered to HTML.
+  """
+  descriptionHTML: HTML!
+
+  """
+  The Node ID of the SponsorsTier object
+  """
+  id: ID!
+
+  """
+  Whether this tier was chosen at checkout time by the sponsor rather than
+  defined ahead of time by the maintainer who manages the Sponsors listing.
+  """
+  isCustomAmount: Boolean!
+
+  """
+  Whether this tier is only for use with one-time sponsorships.
+  """
+  isOneTime: Boolean!
+
+  """
+  How much this tier costs per month in cents.
+  """
+  monthlyPriceInCents: Int!
+
+  """
+  How much this tier costs per month in USD.
+  """
+  monthlyPriceInDollars: Int!
+
+  """
+  The name of the tier.
+  """
+  name: String!
+
+  """
+  The sponsors listing that this tier belongs to.
+  """
+  sponsorsListing: SponsorsListing!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+SponsorsTier information only visible to users that can administer the associated Sponsors listing.
+"""
+type SponsorsTierAdminInfo {
+  """
+  Indicates whether this tier is still a work in progress by the sponsorable and
+  not yet published to the associated GitHub Sponsors profile. Draft tiers
+  cannot be used for new sponsorships and will not be in use on existing
+  sponsorships. Draft tiers cannot be seen by anyone but the admins of the
+  GitHub Sponsors profile.
+  """
+  isDraft: Boolean!
+
+  """
+  Indicates whether this tier is published to the associated GitHub Sponsors
+  profile. Published tiers are visible to anyone who can see the GitHub Sponsors
+  profile, and are available for use in sponsorships if the GitHub Sponsors
+  profile is publicly visible.
+  """
+  isPublished: Boolean!
+
+  """
+  Indicates whether this tier has been retired from the associated GitHub
+  Sponsors profile. Retired tiers are no longer shown on the GitHub Sponsors
+  profile and cannot be chosen for new sponsorships. Existing sponsorships may
+  still use retired tiers if the sponsor selected the tier before it was retired.
+  """
+  isRetired: Boolean!
+
+  """
+  The sponsorships using this tier.
+  """
+  sponsorships(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Whether or not to return private sponsorships using this tier. Defaults to
+    only returning public sponsorships on this tier.
+    """
+    includePrivate: Boolean = false
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for sponsorships returned from this connection. If left
+    blank, the sponsorships will be ordered based on relevancy to the viewer.
+    """
+    orderBy: SponsorshipOrder
+  ): SponsorshipConnection!
+}
+
+"""
+The connection type for SponsorsTier.
+"""
+type SponsorsTierConnection {
+  """
+  A list of edges.
+  """
+  edges: [SponsorsTierEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [SponsorsTier]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type SponsorsTierEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: SponsorsTier
+}
+
+"""
+Ordering options for Sponsors tiers connections.
+"""
+input SponsorsTierOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order tiers by.
+  """
+  field: SponsorsTierOrderField!
+}
+
+"""
+Properties by which Sponsors tiers connections can be ordered.
+"""
+enum SponsorsTierOrderField {
+  """
+  Order tiers by creation time.
+  """
+  CREATED_AT
+
+  """
+  Order tiers by their monthly price in cents.
+  """
+  MONTHLY_PRICE_IN_CENTS
+}
+
+"""
+A sponsorship relationship between a sponsor and a maintainer.
+"""
+type Sponsorship implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the Sponsorship object
+  """
+  id: ID!
+
+  """
+  Whether the sponsorship is active. False implies the sponsor is a past sponsor
+  of the maintainer, while true implies they are a current sponsor.
+  """
+  isActive: Boolean!
+
+  """
+  Whether this sponsorship represents a one-time payment versus a recurring sponsorship.
+  """
+  isOneTimePayment: Boolean!
+
+  """
+  Whether the sponsor has chosen to receive sponsorship update emails sent from
+  the sponsorable. Only returns a non-null value when the viewer has permission to know this.
+  """
+  isSponsorOptedIntoEmail: Boolean
+
+  """
+  The entity that is being sponsored.
+  """
+  maintainer: User!
+    @deprecated(
+      reason: "`Sponsorship.maintainer` will be removed. Use `Sponsorship.sponsorable` instead. Removal on 2020-04-01 UTC."
+    )
+
+  """
+  The platform that was most recently used to pay for the sponsorship.
+  """
+  paymentSource: SponsorshipPaymentSource
+
+  """
+  The privacy level for this sponsorship.
+  """
+  privacyLevel: SponsorshipPrivacy!
+
+  """
+  The user that is sponsoring. Returns null if the sponsorship is private or if sponsor is not a user.
+  """
+  sponsor: User
+    @deprecated(
+      reason: "`Sponsorship.sponsor` will be removed. Use `Sponsorship.sponsorEntity` instead. Removal on 2020-10-01 UTC."
+    )
+
+  """
+  The user or organization that is sponsoring, if you have permission to view them.
+  """
+  sponsorEntity: Sponsor
+
+  """
+  The entity that is being sponsored.
+  """
+  sponsorable: Sponsorable!
+
+  """
+  The associated sponsorship tier.
+  """
+  tier: SponsorsTier
+
+  """
+  Identifies the date and time when the current tier was chosen for this sponsorship.
+  """
+  tierSelectedAt: DateTime
+}
+
+"""
+The connection type for Sponsorship.
+"""
+type SponsorshipConnection {
+  """
+  A list of edges.
+  """
+  edges: [SponsorshipEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Sponsorship]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+
+  """
+  The total amount in cents of all recurring sponsorships in the connection
+  whose amount you can view. Does not include one-time sponsorships.
+  """
+  totalRecurringMonthlyPriceInCents: Int!
+
+  """
+  The total amount in USD of all recurring sponsorships in the connection whose
+  amount you can view. Does not include one-time sponsorships.
+  """
+  totalRecurringMonthlyPriceInDollars: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type SponsorshipEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Sponsorship
+}
+
+"""
+An update sent to sponsors of a user or organization on GitHub Sponsors.
+"""
+type SponsorshipNewsletter implements Node {
+  """
+  The author of the newsletter.
+  """
+  author: User
+
+  """
+  The contents of the newsletter, the message the sponsorable wanted to give.
+  """
+  body: String!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the SponsorshipNewsletter object
+  """
+  id: ID!
+
+  """
+  Indicates if the newsletter has been made available to sponsors.
+  """
+  isPublished: Boolean!
+
+  """
+  The user or organization this newsletter is from.
+  """
+  sponsorable: Sponsorable!
+
+  """
+  The subject of the newsletter, what it's about.
+  """
+  subject: String!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+The connection type for SponsorshipNewsletter.
+"""
+type SponsorshipNewsletterConnection {
+  """
+  A list of edges.
+  """
+  edges: [SponsorshipNewsletterEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [SponsorshipNewsletter]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type SponsorshipNewsletterEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: SponsorshipNewsletter
+}
+
+"""
+Ordering options for sponsorship newsletter connections.
+"""
+input SponsorshipNewsletterOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order sponsorship newsletters by.
+  """
+  field: SponsorshipNewsletterOrderField!
+}
+
+"""
+Properties by which sponsorship update connections can be ordered.
+"""
+enum SponsorshipNewsletterOrderField {
+  """
+  Order sponsorship newsletters by when they were created.
+  """
+  CREATED_AT
+}
+
+"""
+Ordering options for sponsorship connections.
+"""
+input SponsorshipOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order sponsorship by.
+  """
+  field: SponsorshipOrderField!
+}
+
+"""
+Properties by which sponsorship connections can be ordered.
+"""
+enum SponsorshipOrderField {
+  """
+  Order sponsorship by creation time.
+  """
+  CREATED_AT
+}
+
+"""
+How payment was made for funding a GitHub Sponsors sponsorship.
+"""
+enum SponsorshipPaymentSource {
+  """
+  Payment was made through GitHub.
+  """
+  GITHUB
+
+  """
+  Payment was made through Patreon.
+  """
+  PATREON
+}
+
+"""
+The privacy of a sponsorship.
+"""
+enum SponsorshipPrivacy {
+  """
+  Private
+  """
+  PRIVATE
+
+  """
+  Public
+  """
+  PUBLIC
+}
+
+"""
+The possible default commit messages for squash merges.
+"""
+enum SquashMergeCommitMessage {
+  """
+  Default to a blank commit message.
+  """
+  BLANK
+
+  """
+  Default to the branch's commit messages.
+  """
+  COMMIT_MESSAGES
+
+  """
+  Default to the pull request's body.
+  """
+  PR_BODY
+}
+
+"""
+The possible default commit titles for squash merges.
+"""
+enum SquashMergeCommitTitle {
+  """
+  Default to the commit's title (if only one commit) or the pull request's title (when more than one commit).
+  """
+  COMMIT_OR_PR_TITLE
+
+  """
+  Default to the pull request's title.
+  """
+  PR_TITLE
+}
+
+"""
+Represents an SSH signature on a Commit or Tag.
+"""
+type SshSignature implements GitSignature {
+  """
+  Email used to sign this object.
+  """
+  email: String!
+
+  """
+  True if the signature is valid and verified by GitHub.
+  """
+  isValid: Boolean!
+
+  """
+  Hex-encoded fingerprint of the key that signed this object.
+  """
+  keyFingerprint: String
+
+  """
+  Payload for GPG signing object. Raw ODB object without the signature header.
+  """
+  payload: String!
+
+  """
+  ASCII-armored signature header from object.
+  """
+  signature: String!
+
+  """
+  GitHub user corresponding to the email signing this commit.
+  """
+  signer: User
+
+  """
+  The state of this signature. `VALID` if the signature is valid and verified by
+  GitHub; otherwise, the reason the signature is considered invalid.
+  """
+  state: GitSignatureState!
+
+  """
+  True if the signature was made with GitHub's signing key.
+  """
+  wasSignedByGitHub: Boolean!
+}
+
+"""
+Ways in which star connections can be ordered.
+"""
+input StarOrder {
+  """
+  The direction in which to order nodes.
+  """
+  direction: OrderDirection!
+
+  """
+  The field by which to order nodes.
+  """
+  field: StarOrderField!
+}
+
+"""
+Properties by which star connections can be ordered.
+"""
+enum StarOrderField {
+  """
+  Allows ordering a list of stars by when they were created.
+  """
+  STARRED_AT
+}
+
+"""
+The connection type for User.
+"""
+type StargazerConnection {
+  """
+  A list of edges.
+  """
+  edges: [StargazerEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [User]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+Represents a user that's starred a repository.
+"""
+type StargazerEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+  node: User!
+
+  """
+  Identifies when the item was starred.
+  """
+  starredAt: DateTime!
+}
+
+"""
+Things that can be starred.
+"""
+interface Starrable {
+  """
+  The Node ID of the Starrable object
+  """
+  id: ID!
+
+  """
+  Returns a count of how many stargazers there are on this object.
+  """
+  stargazerCount: Int!
+
+  """
+  A list of users who have starred this starrable.
+  """
+  stargazers(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the connection.
+    """
+    orderBy: StarOrder
+  ): StargazerConnection!
+
+  """
+  Returns a boolean indicating whether the viewing user has starred this starrable.
+  """
+  viewerHasStarred: Boolean!
+}
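+
+# A minimal usage sketch (not part of the upstream schema), assuming a root
+# `repository(owner:, name:)` query field whose result implements Starrable;
+# it lists the most recent stargazers first:
+#
+#   query {
+#     repository(owner: "octocat", name: "hello-world") {
+#       stargazerCount
+#       stargazers(first: 10, orderBy: {field: STARRED_AT, direction: DESC}) {
+#         edges { starredAt node { login } }
+#       }
+#     }
+#   }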
+
+"""
+The connection type for Repository.
+"""
+type StarredRepositoryConnection {
+  """
+  A list of edges.
+  """
+  edges: [StarredRepositoryEdge]
+
+  """
+  Is the list of stars for this user truncated? This is true for users that have many stars.
+  """
+  isOverLimit: Boolean!
+
+  """
+  A list of nodes.
+  """
+  nodes: [Repository]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+Represents a starred repository.
+"""
+type StarredRepositoryEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+  node: Repository!
+
+  """
+  Identifies when the item was starred.
+  """
+  starredAt: DateTime!
+}
+
+"""
+Autogenerated input type of StartOrganizationMigration
+"""
+input StartOrganizationMigrationInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The migration source access token.
+  """
+  sourceAccessToken: String!
+
+  """
+  The URL of the organization to migrate.
+  """
+  sourceOrgUrl: URI!
+
+  """
+  The ID of the enterprise the target organization belongs to.
+  """
+  targetEnterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The name of the target organization.
+  """
+  targetOrgName: String!
+}
+
+"""
+Autogenerated return type of StartOrganizationMigration
+"""
+type StartOrganizationMigrationPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new organization migration.
+  """
+  orgMigration: OrganizationMigration
+}
+
+"""
+Autogenerated input type of StartRepositoryMigration
+"""
+input StartRepositoryMigrationInput {
+  """
+  The migration source access token.
+  """
+  accessToken: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Whether to continue the migration on error. Defaults to `true`.
+  """
+  continueOnError: Boolean
+
+  """
+  The signed URL to access the user-uploaded git archive.
+  """
+  gitArchiveUrl: String
+
+  """
+  The GitHub personal access token of the user importing to the target repository.
+  """
+  githubPat: String
+
+  """
+  Whether to lock the source repository.
+  """
+  lockSource: Boolean
+
+  """
+  The signed URL to access the user-uploaded metadata archive.
+  """
+  metadataArchiveUrl: String
+
+  """
+  The ID of the organization that will own the imported repository.
+  """
+  ownerId: ID! @possibleTypes(concreteTypes: ["Organization"])
+
+  """
+  The name of the imported repository.
+  """
+  repositoryName: String!
+
+  """
+  Whether to skip migrating releases for the repository.
+  """
+  skipReleases: Boolean
+
+  """
+  The ID of the migration source.
+  """
+  sourceId: ID! @possibleTypes(concreteTypes: ["MigrationSource"])
+
+  """
+  The URL of the source repository.
+  """
+  sourceRepositoryUrl: URI
+
+  """
+  The visibility of the imported repository.
+  """
+  targetRepoVisibility: String
+}
+
+"""
+Autogenerated return type of StartRepositoryMigration
+"""
+type StartRepositoryMigrationPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new repository migration.
+  """
+  repositoryMigration: RepositoryMigration
+}
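+
+# A minimal usage sketch (not part of the upstream schema), assuming a
+# `startRepositoryMigration` mutation field wired to the pair above (the
+# organization migration pair is used analogously):
+#
+#   mutation {
+#     startRepositoryMigration(input: {
+#       sourceId: "MIGRATION_SOURCE_ID"   # placeholder node IDs
+#       ownerId: "ORGANIZATION_ID"
+#       repositoryName: "imported-repo"
+#       sourceRepositoryUrl: "https://example.com/old/repo"
+#       continueOnError: true
+#     }) {
+#       repositoryMigration { id }
+#     }
+#   }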
+
+"""
+Represents a commit status.
+"""
+type Status implements Node {
+  """
+  A list of status contexts and check runs for this commit.
+  """
+  combinedContexts(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): StatusCheckRollupContextConnection!
+
+  """
+  The commit this status is attached to.
+  """
+  commit: Commit
+
+  """
+  Looks up an individual status context by context name.
+  """
+  context(
+    """
+    The context name.
+    """
+    name: String!
+  ): StatusContext
+
+  """
+  The individual status contexts for this commit.
+  """
+  contexts: [StatusContext!]!
+
+  """
+  The Node ID of the Status object
+  """
+  id: ID!
+
+  """
+  The combined commit status.
+  """
+  state: StatusState!
+}
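+
+# A minimal usage sketch (not part of the upstream schema), assuming a root
+# `repository(owner:, name:)` field, a `Repository.object(expression:)` field,
+# and a `Commit.status` field pointing back at this type:
+#
+#   query {
+#     repository(owner: "octocat", name: "hello-world") {
+#       object(expression: "HEAD") {
+#         ... on Commit {
+#           status {
+#             state
+#             contexts { context state targetUrl }
+#           }
+#         }
+#       }
+#     }
+#   }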
+
+"""
+Required status check.
+"""
+type StatusCheckConfiguration {
+  """
+  The status check context name that must be present on the commit.
+  """
+  context: String!
+
+  """
+  The optional integration ID that this status check must originate from.
+  """
+  integrationId: Int
+}
+
+"""
+Required status check.
+"""
+input StatusCheckConfigurationInput {
+  """
+  The status check context name that must be present on the commit.
+  """
+  context: String!
+
+  """
+  The optional integration ID that this status check must originate from.
+  """
+  integrationId: Int
+}
+
+"""
+Represents the rollup for both the check runs and status for a commit.
+"""
+type StatusCheckRollup implements Node {
+  """
+  The commit the status and check runs are attached to.
+  """
+  commit: Commit
+
+  """
+  A list of status contexts and check runs for this commit.
+  """
+  contexts(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): StatusCheckRollupContextConnection!
+
+  """
+  The Node ID of the StatusCheckRollup object
+  """
+  id: ID!
+
+  """
+  The combined status for the commit.
+  """
+  state: StatusState!
+}
+
+"""
+Types that can be inside a StatusCheckRollup context.
+"""
+union StatusCheckRollupContext = CheckRun | StatusContext
+
+"""
+The connection type for StatusCheckRollupContext.
+"""
+type StatusCheckRollupContextConnection {
+  """
+  The number of check runs in this rollup.
+  """
+  checkRunCount: Int!
+
+  """
+  Counts of check runs by state.
+  """
+  checkRunCountsByState: [CheckRunStateCount!]
+
+  """
+  A list of edges.
+  """
+  edges: [StatusCheckRollupContextEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [StatusCheckRollupContext]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  The number of status contexts in this rollup.
+  """
+  statusContextCount: Int!
+
+  """
+  Counts of status contexts by state.
+  """
+  statusContextCountsByState: [StatusContextStateCount!]
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type StatusCheckRollupContextEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: StatusCheckRollupContext
+}
+
+"""
+Represents an individual commit status context.
+"""
+type StatusContext implements Node & RequirableByPullRequest {
+  """
+  The avatar of the OAuth application or the user that created the status.
+  """
+  avatarUrl(
+    """
+    The size of the resulting square image.
+    """
+    size: Int = 40
+  ): URI
+
+  """
+  The commit this status context is attached to.
+  """
+  commit: Commit
+
+  """
+  The name of this status context.
+  """
+  context: String!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The actor who created this status context.
+  """
+  creator: Actor
+
+  """
+  The description for this status context.
+  """
+  description: String
+
+  """
+  The Node ID of the StatusContext object
+  """
+  id: ID!
+
+  """
+  Whether this is required to pass before merging for a specific pull request.
+  """
+  isRequired(
+    """
+    The id of the pull request this is required for.
+    """
+    pullRequestId: ID
+
+    """
+    The number of the pull request this is required for.
+    """
+    pullRequestNumber: Int
+  ): Boolean!
+
+  """
+  The state of this status context.
+  """
+  state: StatusState!
+
+  """
+  The URL for this status context.
+  """
+  targetUrl: URI
+}
+
+"""
+Represents a count of the state of a status context.
+"""
+type StatusContextStateCount {
+  """
+  The number of statuses with this state.
+  """
+  count: Int!
+
+  """
+  The state of a status context.
+  """
+  state: StatusState!
+}
+
+"""
+The possible commit status states.
+"""
+enum StatusState {
+  """
+  Status is errored.
+  """
+  ERROR
+
+  """
+  Status is expected.
+  """
+  EXPECTED
+
+  """
+  Status is failing.
+  """
+  FAILURE
+
+  """
+  Status is pending.
+  """
+  PENDING
+
+  """
+  Status is successful.
+  """
+  SUCCESS
+}
+
+"""
+A Stripe Connect account for receiving sponsorship funds from GitHub Sponsors.
+"""
+type StripeConnectAccount {
+  """
+  The account number used to identify this Stripe Connect account.
+  """
+  accountId: String!
+
+  """
+  The name of the country or region of an external account, such as a bank
+  account, tied to the Stripe Connect account. Will only return a value when
+  queried by the maintainer of the associated GitHub Sponsors profile
+  themselves, or by an admin of the sponsorable organization.
+  """
+  billingCountryOrRegion: String
+
+  """
+  The name of the country or region of the Stripe Connect account. Will only
+  return a value when queried by the maintainer of the associated GitHub
+  Sponsors profile themselves, or by an admin of the sponsorable organization.
+  """
+  countryOrRegion: String
+
+  """
+  Whether this Stripe Connect account is currently in use for the associated GitHub Sponsors profile.
+  """
+  isActive: Boolean!
+
+  """
+  The GitHub Sponsors profile associated with this Stripe Connect account.
+  """
+  sponsorsListing: SponsorsListing!
+
+  """
+  The URL to access this Stripe Connect account on Stripe's website.
+  """
+  stripeDashboardUrl: URI!
+}
+
+"""
+Autogenerated input type of SubmitPullRequestReview
+"""
+input SubmitPullRequestReviewInput {
+  """
+  The text field to set on the Pull Request Review.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The event to send to the Pull Request Review.
+  """
+  event: PullRequestReviewEvent!
+
+  """
+  The Pull Request ID to submit any pending reviews for.
+  """
+  pullRequestId: ID @possibleTypes(concreteTypes: ["PullRequest"])
+
+  """
+  The Pull Request Review ID to submit.
+  """
+  pullRequestReviewId: ID @possibleTypes(concreteTypes: ["PullRequestReview"])
+}
+
+"""
+Autogenerated return type of SubmitPullRequestReview
+"""
+type SubmitPullRequestReviewPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The submitted pull request review.
+  """
+  pullRequestReview: PullRequestReview
+}
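+
+# Illustrative only: a minimal mutation sketch, not part of the upstream
+# schema, showing SubmitPullRequestReviewInput in use. It assumes the
+# `submitPullRequestReview` mutation field defined elsewhere in this schema;
+# the node ID is a placeholder.
+#
+# mutation {
+#   submitPullRequestReview(input: {
+#     pullRequestReviewId: "PRR_exampleid"
+#     event: APPROVE
+#     body: "Looks good to me."
+#   }) {
+#     pullRequestReview { state }
+#   }
+# }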
+
+"""
+A pointer to a repository at a specific revision embedded inside another repository.
+"""
+type Submodule {
+  """
+  The branch of the upstream submodule for tracking updates
+  """
+  branch: String
+
+  """
+  The git URL of the submodule repository
+  """
+  gitUrl: URI!
+
+  """
+  The name of the submodule in .gitmodules
+  """
+  name: String!
+
+  """
+  The name of the submodule in .gitmodules (Base64-encoded)
+  """
+  nameRaw: Base64String!
+
+  """
+  The path in the superproject that this submodule is located in
+  """
+  path: String!
+
+  """
+  The path in the superproject that this submodule is located in (Base64-encoded)
+  """
+  pathRaw: Base64String!
+
+  """
+  The commit revision of the subproject repository being tracked by the submodule
+  """
+  subprojectCommitOid: GitObjectID
+}
+
+"""
+The connection type for Submodule.
+"""
+type SubmoduleConnection {
+  """
+  A list of edges.
+  """
+  edges: [SubmoduleEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Submodule]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type SubmoduleEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Submodule
+}
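+
+# Illustrative only: a minimal query sketch, not part of the upstream schema,
+# paginating a SubmoduleConnection. It assumes the `repository` root field and
+# the `Repository.submodules` connection defined elsewhere in this schema;
+# the owner and name values are placeholders.
+#
+# query {
+#   repository(owner: "octocat", name: "hello-world") {
+#     submodules(first: 10) {
+#       totalCount
+#       pageInfo { hasNextPage endCursor }
+#       nodes { name path branch gitUrl subprojectCommitOid }
+#     }
+#   }
+# }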
+
+"""
+Entities that can be subscribed to for web and email notifications.
+"""
+interface Subscribable {
+  """
+  The Node ID of the Subscribable object
+  """
+  id: ID!
+
+  """
+  Check if the viewer is able to change their subscription status for the repository.
+  """
+  viewerCanSubscribe: Boolean!
+
+  """
+  Identifies if the viewer is watching, not watching, or ignoring the subscribable entity.
+  """
+  viewerSubscription: SubscriptionState
+}
+
+"""
+Entities that can be subscribed to for web and email notifications.
+"""
+interface SubscribableThread {
+  """
+  The Node ID of the SubscribableThread object
+  """
+  id: ID!
+
+  """
+  Identifies the viewer's thread subscription form action.
+  """
+  viewerThreadSubscriptionFormAction: ThreadSubscriptionFormAction
+
+  """
+  Identifies the viewer's thread subscription status.
+  """
+  viewerThreadSubscriptionStatus: ThreadSubscriptionState
+}
+
+"""
+Represents a 'subscribed' event on a given `Subscribable`.
+"""
+type SubscribedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the SubscribedEvent object
+  """
+  id: ID!
+
+  """
+  Object referenced by event.
+  """
+  subscribable: Subscribable!
+}
+
+"""
+The possible states of a subscription.
+"""
+enum SubscriptionState {
+  """
+  The User is never notified.
+  """
+  IGNORED
+
+  """
+  The User is notified of all conversations.
+  """
+  SUBSCRIBED
+
+  """
+  The User is only notified when participating or @mentioned.
+  """
+  UNSUBSCRIBED
+}
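+
+# Illustrative only: a minimal sketch, not part of the upstream schema,
+# showing how a Subscribable's state is read and changed. It assumes that
+# `Repository` implements Subscribable and that the `updateSubscription`
+# mutation is defined elsewhere in this schema; the owner, name, and node ID
+# are placeholders.
+#
+# query {
+#   repository(owner: "octocat", name: "hello-world") {
+#     id
+#     viewerCanSubscribe
+#     viewerSubscription
+#   }
+# }
+#
+# mutation {
+#   updateSubscription(input: {subscribableId: "R_exampleid", state: IGNORED}) {
+#     subscribable { viewerSubscription }
+#   }
+# }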
+
+"""
+A suggestion to review a pull request based on a user's commit history and review comments.
+"""
+type SuggestedReviewer {
+  """
+  Is this suggestion based on past commits?
+  """
+  isAuthor: Boolean!
+
+  """
+  Is this suggestion based on past review comments?
+  """
+  isCommenter: Boolean!
+
+  """
+  Identifies the user suggested to review the pull request.
+  """
+  reviewer: User!
+}
+
+"""
+Represents a Git tag.
+"""
+type Tag implements GitObject & Node {
+  """
+  An abbreviated version of the Git object ID
+  """
+  abbreviatedOid: String!
+
+  """
+  The HTTP path for this Git object
+  """
+  commitResourcePath: URI!
+
+  """
+  The HTTP URL for this Git object
+  """
+  commitUrl: URI!
+
+  """
+  The Node ID of the Tag object
+  """
+  id: ID!
+
+  """
+  The Git tag message.
+  """
+  message: String
+
+  """
+  The Git tag name.
+  """
+  name: String!
+
+  """
+  The Git object ID
+  """
+  oid: GitObjectID!
+
+  """
+  The Repository the Git object belongs to
+  """
+  repository: Repository!
+
+  """
+  Details about the tag author.
+  """
+  tagger: GitActor
+
+  """
+  The Git object the tag points to.
+  """
+  target: GitObject!
+}
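+
+# Illustrative only: a minimal query sketch, not part of the upstream schema,
+# dereferencing annotated tags through the Tag type above. It assumes the
+# `repository` root field and the `Repository.refs` connection defined
+# elsewhere in this schema; the owner and name values are placeholders.
+#
+# query {
+#   repository(owner: "octocat", name: "hello-world") {
+#     refs(refPrefix: "refs/tags/", first: 5) {
+#       nodes {
+#         name
+#         target {
+#           ... on Tag {
+#             message
+#             tagger { name date }
+#             target { oid }
+#           }
+#         }
+#       }
+#     }
+#   }
+# }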
+
+"""
+Parameters to be used for the tag_name_pattern rule
+"""
+type TagNamePatternParameters {
+  """
+  How this rule will appear to users.
+  """
+  name: String
+
+  """
+  If true, the rule will fail if the pattern matches.
+  """
+  negate: Boolean!
+
+  """
+  The operator to use for matching.
+  """
+  operator: String!
+
+  """
+  The pattern to match with.
+  """
+  pattern: String!
+}
+
+"""
+Parameters to be used for the tag_name_pattern rule
+"""
+input TagNamePatternParametersInput {
+  """
+  How this rule will appear to users.
+  """
+  name: String
+
+  """
+  If true, the rule will fail if the pattern matches.
+  """
+  negate: Boolean
+
+  """
+  The operator to use for matching.
+  """
+  operator: String!
+
+  """
+  The pattern to match with.
+  """
+  pattern: String!
+}
+
+"""
+A team of users in an organization.
+"""
+type Team implements MemberStatusable & Node & Subscribable {
+  """
+  A list of teams that are ancestors of this team.
+  """
+  ancestors(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): TeamConnection!
+
+  """
+  A URL pointing to the team's avatar.
+  """
+  avatarUrl(
+    """
+    The size in pixels of the resulting square image.
+    """
+    size: Int = 400
+  ): URI
+
+  """
+  List of child teams belonging to this team
+  """
+  childTeams(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Whether to list immediate child teams or all descendant child teams.
+    """
+    immediateOnly: Boolean = true
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Order for connection
+    """
+    orderBy: TeamOrder
+
+    """
+    User logins to filter by
+    """
+    userLogins: [String!]
+  ): TeamConnection!
+
+  """
+  The slug corresponding to the organization and team.
+  """
+  combinedSlug: String!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The description of the team.
+  """
+  description: String
+
+  """
+  Find a team discussion by its number.
+  """
+  discussion(
+    """
+    The sequence number of the discussion to find.
+    """
+    number: Int!
+  ): TeamDiscussion
+
+  """
+  A list of team discussions.
+  """
+  discussions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    If provided, filters discussions according to whether or not they are pinned.
+    """
+    isPinned: Boolean
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Order for connection
+    """
+    orderBy: TeamDiscussionOrder
+  ): TeamDiscussionConnection!
+
+  """
+  The HTTP path for team discussions
+  """
+  discussionsResourcePath: URI!
+
+  """
+  The HTTP URL for team discussions
+  """
+  discussionsUrl: URI!
+
+  """
+  The HTTP path for editing this team
+  """
+  editTeamResourcePath: URI!
+
+  """
+  The HTTP URL for editing this team
+  """
+  editTeamUrl: URI!
+
+  """
+  The Node ID of the Team object
+  """
+  id: ID!
+
+  """
+  A list of pending invitations for users to this team
+  """
+  invitations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): OrganizationInvitationConnection
+
+  """
+  Get the status messages members of this entity have set that are either public or visible only to the organization.
+  """
+  memberStatuses(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for user statuses returned from the connection.
+    """
+    orderBy: UserStatusOrder = {field: UPDATED_AT, direction: DESC}
+  ): UserStatusConnection!
+
+  """
+  A list of users who are members of this team.
+  """
+  members(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter by membership type
+    """
+    membership: TeamMembershipType = ALL
+
+    """
+    Order for the connection.
+    """
+    orderBy: TeamMemberOrder
+
+    """
+    The search string to look for.
+    """
+    query: String
+
+    """
+    Filter by team member role
+    """
+    role: TeamMemberRole
+  ): TeamMemberConnection!
+
+  """
+  The HTTP path for the team's members
+  """
+  membersResourcePath: URI!
+
+  """
+  The HTTP URL for the team's members
+  """
+  membersUrl: URI!
+
+  """
+  The name of the team.
+  """
+  name: String!
+
+  """
+  The HTTP path for creating a new team
+  """
+  newTeamResourcePath: URI!
+
+  """
+  The HTTP URL for creating a new team
+  """
+  newTeamUrl: URI!
+
+  """
+  The notification setting that the team has set.
+  """
+  notificationSetting: TeamNotificationSetting!
+
+  """
+  The organization that owns this team.
+  """
+  organization: Organization!
+
+  """
+  The parent team of the team.
+  """
+  parentTeam: Team
+
+  """
+  The level of privacy the team has.
+  """
+  privacy: TeamPrivacy!
+
+  """
+  Finds and returns the project according to the provided project number.
+  """
+  projectV2(
+    """
+    The Project number.
+    """
+    number: Int!
+  ): ProjectV2
+
+  """
+  List of projects this team has collaborator access to.
+  """
+  projectsV2(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Filtering options for projects returned from this connection
+    """
+    filterBy: ProjectV2Filters = {}
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    How to order the returned projects.
+    """
+    orderBy: ProjectV2Order = {field: NUMBER, direction: DESC}
+
+    """
+    The query to search projects by.
+    """
+    query: String = ""
+  ): ProjectV2Connection!
+
+  """
+  A list of repositories this team has access to.
+  """
+  repositories(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Order for the connection.
+    """
+    orderBy: TeamRepositoryOrder
+
+    """
+    The search string to look for. Repositories will be returned where the name contains your search string.
+    """
+    query: String
+  ): TeamRepositoryConnection!
+
+  """
+  The HTTP path for this team's repositories
+  """
+  repositoriesResourcePath: URI!
+
+  """
+  The HTTP URL for this team's repositories
+  """
+  repositoriesUrl: URI!
+
+  """
+  The HTTP path for this team
+  """
+  resourcePath: URI!
+
+  """
+  What algorithm is used for review assignment for this team
+  """
+  reviewRequestDelegationAlgorithm: TeamReviewAssignmentAlgorithm @preview(toggledBy: "stone-crop-preview")
+
+  """
+  True if review assignment is enabled for this team
+  """
+  reviewRequestDelegationEnabled: Boolean! @preview(toggledBy: "stone-crop-preview")
+
+  """
+  How many team members are required for review assignment for this team
+  """
+  reviewRequestDelegationMemberCount: Int @preview(toggledBy: "stone-crop-preview")
+
+  """
+  When assigning team members via delegation, whether the entire team should be notified as well.
+  """
+  reviewRequestDelegationNotifyTeam: Boolean! @preview(toggledBy: "stone-crop-preview")
+
+  """
+  The slug corresponding to the team.
+  """
+  slug: String!
+
+  """
+  The HTTP path for this team's teams
+  """
+  teamsResourcePath: URI!
+
+  """
+  The HTTP URL for this team's teams
+  """
+  teamsUrl: URI!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this team
+  """
+  url: URI!
+
+  """
+  Team is adminable by the viewer.
+  """
+  viewerCanAdminister: Boolean!
+
+  """
+  Check if the viewer is able to change their subscription status for the repository.
+  """
+  viewerCanSubscribe: Boolean!
+
+  """
+  Identifies if the viewer is watching, not watching, or ignoring the subscribable entity.
+  """
+  viewerSubscription: SubscriptionState
+}
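+
+# Illustrative only: a minimal query sketch, not part of the upstream schema,
+# walking the Team hierarchy fields above. It assumes the `organization` root
+# field and the `Organization.team` field defined elsewhere in this schema;
+# the login and slug values are placeholders.
+#
+# query {
+#   organization(login: "octo-org") {
+#     team(slug: "frontend") {
+#       combinedSlug
+#       privacy
+#       notificationSetting
+#       ancestors(first: 5) { nodes { name } }
+#       childTeams(first: 5, immediateOnly: true) {
+#         totalCount
+#         nodes { slug }
+#       }
+#     }
+#   }
+# }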
+
+"""
+Audit log entry for a team.add_member event.
+"""
+type TeamAddMemberAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & TeamAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the TeamAddMemberAuditEntry object
+  """
+  id: ID!
+
+  """
+  Whether the team was mapped to an LDAP Group.
+  """
+  isLdapMapped: Boolean
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The team associated with the action
+  """
+  team: Team
+
+  """
+  The name of the team
+  """
+  teamName: String
+
+  """
+  The HTTP path for this team
+  """
+  teamResourcePath: URI
+
+  """
+  The HTTP URL for this team
+  """
+  teamUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a team.add_repository event.
+"""
+type TeamAddRepositoryAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData & TeamAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the TeamAddRepositoryAuditEntry object
+  """
+  id: ID!
+
+  """
+  Whether the team was mapped to an LDAP Group.
+  """
+  isLdapMapped: Boolean
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The team associated with the action
+  """
+  team: Team
+
+  """
+  The name of the team
+  """
+  teamName: String
+
+  """
+  The HTTP path for this team
+  """
+  teamResourcePath: URI
+
+  """
+  The HTTP URL for this team
+  """
+  teamUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Metadata for an audit entry with action team.*
+"""
+interface TeamAuditEntryData {
+  """
+  The team associated with the action
+  """
+  team: Team
+
+  """
+  The name of the team
+  """
+  teamName: String
+
+  """
+  The HTTP path for this team
+  """
+  teamResourcePath: URI
+
+  """
+  The HTTP URL for this team
+  """
+  teamUrl: URI
+}
+
+"""
+Audit log entry for a team.change_parent_team event.
+"""
+type TeamChangeParentTeamAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & TeamAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the TeamChangeParentTeamAuditEntry object
+  """
+  id: ID!
+
+  """
+  Whether the team was mapped to an LDAP Group.
+  """
+  isLdapMapped: Boolean
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The new parent team.
+  """
+  parentTeam: Team
+
+  """
+  The name of the new parent team
+  """
+  parentTeamName: String
+
+  """
+  The name of the former parent team
+  """
+  parentTeamNameWas: String
+
+  """
+  The HTTP path for the parent team
+  """
+  parentTeamResourcePath: URI
+
+  """
+  The HTTP URL for the parent team
+  """
+  parentTeamUrl: URI
+
+  """
+  The former parent team.
+  """
+  parentTeamWas: Team
+
+  """
+  The HTTP path for the previous parent team
+  """
+  parentTeamWasResourcePath: URI
+
+  """
+  The HTTP URL for the previous parent team
+  """
+  parentTeamWasUrl: URI
+
+  """
+  The team associated with the action
+  """
+  team: Team
+
+  """
+  The name of the team
+  """
+  teamName: String
+
+  """
+  The HTTP path for this team
+  """
+  teamResourcePath: URI
+
+  """
+  The HTTP URL for this team
+  """
+  teamUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+The connection type for Team.
+"""
+type TeamConnection {
+  """
+  A list of edges.
+  """
+  edges: [TeamEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Team]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+A team discussion.
+"""
+type TeamDiscussion implements Comment & Deletable & Node & Reactable & Subscribable & UniformResourceLocatable & Updatable & UpdatableComment {
+  """
+  The actor who authored the comment.
+  """
+  author: Actor
+
+  """
+  Author's association with the discussion's team.
+  """
+  authorAssociation: CommentAuthorAssociation!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  The body as Markdown.
+  """
+  body: String!
+
+  """
+  The body rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  The body rendered to text.
+  """
+  bodyText: String!
+
+  """
+  Identifies the discussion body hash.
+  """
+  bodyVersion: String!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  A list of comments on this discussion.
+  """
+  comments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    When provided, filters the connection such that results begin with the comment with this number.
+    """
+    fromComment: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Order for connection
+    """
+    orderBy: TeamDiscussionCommentOrder
+  ): TeamDiscussionCommentConnection!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  The HTTP path for discussion comments
+  """
+  commentsResourcePath: URI!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  The HTTP URL for discussion comments
+  """
+  commentsUrl: URI!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Check if this comment was created via an email reply.
+  """
+  createdViaEmail: Boolean!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The actor who edited the comment.
+  """
+  editor: Actor
+
+  """
+  The Node ID of the TeamDiscussion object
+  """
+  id: ID!
+
+  """
+  Check if this comment was edited and includes an edit with the creation data
+  """
+  includesCreatedEdit: Boolean!
+
+  """
+  Whether or not the discussion is pinned.
+  """
+  isPinned: Boolean!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  Whether or not the discussion is only visible to team members and organization owners.
+  """
+  isPrivate: Boolean!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  The moment the editor made the last edit
+  """
+  lastEditedAt: DateTime
+
+  """
+  Identifies the discussion within its team.
+  """
+  number: Int!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  Identifies when the comment was published at.
+  """
+  publishedAt: DateTime
+
+  """
+  A list of reactions grouped by content left on the subject.
+  """
+  reactionGroups: [ReactionGroup!]
+
+  """
+  A list of Reactions left on the Issue.
+  """
+  reactions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Allows filtering Reactions by emoji.
+    """
+    content: ReactionContent
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Allows specifying the order in which reactions are returned.
+    """
+    orderBy: ReactionOrder
+  ): ReactionConnection!
+
+  """
+  The HTTP path for this discussion
+  """
+  resourcePath: URI!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  The team that defines the context of this discussion.
+  """
+  team: Team!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  The title of the discussion
+  """
+  title: String!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this discussion
+  """
+  url: URI!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  A list of edits to this content.
+  """
+  userContentEdits(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserContentEditConnection
+
+  """
+  Check if the current viewer can delete this object.
+  """
+  viewerCanDelete: Boolean!
+
+  """
+  Whether or not the current viewer can pin this discussion.
+  """
+  viewerCanPin: Boolean!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  Can user react to this subject
+  """
+  viewerCanReact: Boolean!
+
+  """
+  Check if the viewer is able to change their subscription status for the repository.
+  """
+  viewerCanSubscribe: Boolean!
+
+  """
+  Check if the current viewer can update this object.
+  """
+  viewerCanUpdate: Boolean!
+
+  """
+  Reasons why the current viewer can not update this comment.
+  """
+  viewerCannotUpdateReasons: [CommentCannotUpdateReason!]!
+
+  """
+  Did the viewer author this comment.
+  """
+  viewerDidAuthor: Boolean!
+
+  """
+  Identifies if the viewer is watching, not watching, or ignoring the subscribable entity.
+  """
+  viewerSubscription: SubscriptionState
+}
+
+"""
+A comment on a team discussion.
+"""
+type TeamDiscussionComment implements Comment & Deletable & Node & Reactable & UniformResourceLocatable & Updatable & UpdatableComment {
+  """
+  The actor who authored the comment.
+  """
+  author: Actor
+
+  """
+  Author's association with the comment's team.
+  """
+  authorAssociation: CommentAuthorAssociation!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  The body as Markdown.
+  """
+  body: String!
+
+  """
+  The body rendered to HTML.
+  """
+  bodyHTML: HTML!
+
+  """
+  The body rendered to text.
+  """
+  bodyText: String!
+
+  """
+  The current version of the body content.
+  """
+  bodyVersion: String!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Check if this comment was created via an email reply.
+  """
+  createdViaEmail: Boolean!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The discussion this comment is about.
+  """
+  discussion: TeamDiscussion!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  The actor who edited the comment.
+  """
+  editor: Actor
+
+  """
+  The Node ID of the TeamDiscussionComment object
+  """
+  id: ID!
+
+  """
+  Check if this comment was edited and includes an edit with the creation data
+  """
+  includesCreatedEdit: Boolean!
+
+  """
+  The moment the editor made the last edit
+  """
+  lastEditedAt: DateTime
+
+  """
+  Identifies the comment number.
+  """
+  number: Int!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  Identifies when the comment was published at.
+  """
+  publishedAt: DateTime
+
+  """
+  A list of reactions grouped by content left on the subject.
+  """
+  reactionGroups: [ReactionGroup!]
+
+  """
+  A list of Reactions left on the Issue.
+  """
+  reactions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Allows filtering Reactions by emoji.
+    """
+    content: ReactionContent
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Allows specifying the order in which reactions are returned.
+    """
+    orderBy: ReactionOrder
+  ): ReactionConnection!
+
+  """
+  The HTTP path for this comment
+  """
+  resourcePath: URI!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this comment
+  """
+  url: URI!
+    @deprecated(
+      reason: "The Team Discussions feature is deprecated in favor of Organization Discussions. Follow the guide at https://github.blog/changelog/2023-02-08-sunset-notice-team-discussions/ to find a suitable replacement. Removal on 2024-07-01 UTC."
+    )
+
+  """
+  A list of edits to this content.
+  """
+  userContentEdits(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): UserContentEditConnection
+
+  """
+  Check if the current viewer can delete this object.
+  """
+  viewerCanDelete: Boolean!
+
+  """
+  Can user react to this subject
+  """
+  viewerCanReact: Boolean!
+
+  """
+  Check if the current viewer can update this object.
+  """
+  viewerCanUpdate: Boolean!
+
+  """
+  Reasons why the current viewer can not update this comment.
+  """
+  viewerCannotUpdateReasons: [CommentCannotUpdateReason!]!
+
+  """
+  Did the viewer author this comment.
+  """
+  viewerDidAuthor: Boolean!
+}
+
+"""
+The connection type for TeamDiscussionComment.
+"""
+type TeamDiscussionCommentConnection {
+  """
+  A list of edges.
+  """
+  edges: [TeamDiscussionCommentEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [TeamDiscussionComment]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type TeamDiscussionCommentEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: TeamDiscussionComment
+}
+
+"""
+Ways in which team discussion comment connections can be ordered.
+"""
+input TeamDiscussionCommentOrder {
+  """
+  The direction in which to order nodes.
+  """
+  direction: OrderDirection!
+
+  """
+  The field by which to order nodes.
+  """
+  field: TeamDiscussionCommentOrderField!
+}
+
+"""
+Properties by which team discussion comment connections can be ordered.
+"""
+enum TeamDiscussionCommentOrderField {
+  """
+  Allows sequential ordering of team discussion comments (which is equivalent to chronological ordering).
+  """
+  NUMBER
+}
+
+"""
+The connection type for TeamDiscussion.
+"""
+type TeamDiscussionConnection {
+  """
+  A list of edges.
+  """
+  edges: [TeamDiscussionEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [TeamDiscussion]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type TeamDiscussionEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: TeamDiscussion
+}
+
+"""
+Ways in which team discussion connections can be ordered.
+"""
+input TeamDiscussionOrder {
+  """
+  The direction in which to order nodes.
+  """
+  direction: OrderDirection!
+
+  """
+  The field by which to order nodes.
+  """
+  field: TeamDiscussionOrderField!
+}
+
+"""
+Properties by which team discussion connections can be ordered.
+"""
+enum TeamDiscussionOrderField {
+  """
+  Allows chronological ordering of team discussions.
+  """
+  CREATED_AT
+}
+
+"""
+An edge in a connection.
+"""
+type TeamEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: Team
+}
+
+"""
+The connection type for User.
+"""
+type TeamMemberConnection {
+  """
+  A list of edges.
+  """
+  edges: [TeamMemberEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [User]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+Represents a user who is a member of a team.
+"""
+type TeamMemberEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The HTTP path to the organization's member access page.
+  """
+  memberAccessResourcePath: URI!
+
+  """
+  The HTTP URL to the organization's member access page.
+  """
+  memberAccessUrl: URI!
+  node: User!
+
+  """
+  The role the member has on the team.
+  """
+  role: TeamMemberRole!
+}
+
+"""
+Ordering options for team member connections
+"""
+input TeamMemberOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order team members by.
+  """
+  field: TeamMemberOrderField!
+}
+
+"""
+Properties by which team member connections can be ordered.
+"""
+enum TeamMemberOrderField {
+  """
+  Order team members by creation time
+  """
+  CREATED_AT
+
+  """
+  Order team members by login
+  """
+  LOGIN
+}
+
+"""
+The possible team member roles; either 'maintainer' or 'member'.
+"""
+enum TeamMemberRole {
+  """
+  A team maintainer has permission to add and remove team members.
+  """
+  MAINTAINER
+
+  """
+  A team member has no administrative permissions on the team.
+  """
+  MEMBER
+}
+
+"""
+Defines which types of team members are included in the returned list. Can be one of IMMEDIATE, CHILD_TEAM or ALL.
+"""
+enum TeamMembershipType {
+  """
+  Includes immediate and child team members for the team.
+  """
+  ALL
+
+  """
+  Includes only child team members for the team.
+  """
+  CHILD_TEAM
+
+  """
+  Includes only immediate members of the team.
+  """
+  IMMEDIATE
+}
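+
+# Illustrative only: a minimal query sketch, not part of the upstream schema,
+# combining the TeamMember* types above: the `membership` filter, ordering by
+# login, and the per-edge `role` field. It assumes the `organization` root
+# field and the `Organization.team` field defined elsewhere in this schema;
+# the login and slug values are placeholders.
+#
+# query {
+#   organization(login: "octo-org") {
+#     team(slug: "frontend") {
+#       members(first: 10, membership: IMMEDIATE, orderBy: {field: LOGIN, direction: ASC}) {
+#         totalCount
+#         edges {
+#           role
+#           node { login }
+#         }
+#       }
+#     }
+#   }
+# }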
+
+"""
+The possible team notification values.
+"""
+enum TeamNotificationSetting {
+  """
+  No one will receive notifications.
+  """
+  NOTIFICATIONS_DISABLED
+
+  """
+  Everyone will receive notifications when the team is @mentioned.
+  """
+  NOTIFICATIONS_ENABLED
+}
+
+"""
+Ways in which team connections can be ordered.
+"""
+input TeamOrder {
+  """
+  The direction in which to order nodes.
+  """
+  direction: OrderDirection!
+
+  """
+  The field by which to order nodes.
+  """
+  field: TeamOrderField!
+}
+
+"""
+Properties by which team connections can be ordered.
+"""
+enum TeamOrderField {
+  """
+  Allows ordering a list of teams by name.
+  """
+  NAME
+}
+
+"""
+The possible team privacy values.
+"""
+enum TeamPrivacy {
+  """
+  A secret team can only be seen by its members.
+  """
+  SECRET
+
+  """
+  A visible team can be seen and @mentioned by every member of the organization.
+  """
+  VISIBLE
+}
+
+"""
+Audit log entry for a team.remove_member event.
+"""
+type TeamRemoveMemberAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & TeamAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the TeamRemoveMemberAuditEntry object
+  """
+  id: ID!
+
+  """
+  Whether the team was mapped to an LDAP Group.
+  """
+  isLdapMapped: Boolean
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The team associated with the action
+  """
+  team: Team
+
+  """
+  The name of the team
+  """
+  teamName: String
+
+  """
+  The HTTP path for this team
+  """
+  teamResourcePath: URI
+
+  """
+  The HTTP URL for this team
+  """
+  teamUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+Audit log entry for a team.remove_repository event.
+"""
+type TeamRemoveRepositoryAuditEntry implements AuditEntry & Node & OrganizationAuditEntryData & RepositoryAuditEntryData & TeamAuditEntryData {
+  """
+  The action name
+  """
+  action: String!
+
+  """
+  The user who initiated the action
+  """
+  actor: AuditEntryActor
+
+  """
+  The IP address of the actor
+  """
+  actorIp: String
+
+  """
+  A readable representation of the actor's location
+  """
+  actorLocation: ActorLocation
+
+  """
+  The username of the user who initiated the action
+  """
+  actorLogin: String
+
+  """
+  The HTTP path for the actor.
+  """
+  actorResourcePath: URI
+
+  """
+  The HTTP URL for the actor.
+  """
+  actorUrl: URI
+
+  """
+  The time the action was initiated
+  """
+  createdAt: PreciseDateTime!
+
+  """
+  The Node ID of the TeamRemoveRepositoryAuditEntry object
+  """
+  id: ID!
+
+  """
+  Whether the team was mapped to an LDAP Group.
+  """
+  isLdapMapped: Boolean
+
+  """
+  The corresponding operation type for the action
+  """
+  operationType: OperationType
+
+  """
+  The Organization associated with the Audit Entry.
+  """
+  organization: Organization
+
+  """
+  The name of the Organization.
+  """
+  organizationName: String
+
+  """
+  The HTTP path for the organization
+  """
+  organizationResourcePath: URI
+
+  """
+  The HTTP URL for the organization
+  """
+  organizationUrl: URI
+
+  """
+  The repository associated with the action
+  """
+  repository: Repository
+
+  """
+  The name of the repository
+  """
+  repositoryName: String
+
+  """
+  The HTTP path for the repository
+  """
+  repositoryResourcePath: URI
+
+  """
+  The HTTP URL for the repository
+  """
+  repositoryUrl: URI
+
+  """
+  The team associated with the action
+  """
+  team: Team
+
+  """
+  The name of the team
+  """
+  teamName: String
+
+  """
+  The HTTP path for this team
+  """
+  teamResourcePath: URI
+
+  """
+  The HTTP URL for this team
+  """
+  teamUrl: URI
+
+  """
+  The user affected by the action
+  """
+  user: User
+
+  """
+  For actions involving two users, the actor is the initiator and the user is the affected user.
+  """
+  userLogin: String
+
+  """
+  The HTTP path for the user.
+  """
+  userResourcePath: URI
+
+  """
+  The HTTP URL for the user.
+  """
+  userUrl: URI
+}
+
+"""
+The connection type for Repository.
+"""
+type TeamRepositoryConnection {
+  """
+  A list of edges.
+  """
+  edges: [TeamRepositoryEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [Repository]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+Represents a team repository.
+"""
+type TeamRepositoryEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+  node: Repository!
+
+  """
+  The permission level the team has on the repository
+  """
+  permission: RepositoryPermission!
+}
+
+"""
+Ordering options for team repository connections
+"""
+input TeamRepositoryOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order repositories by.
+  """
+  field: TeamRepositoryOrderField!
+}
+
+"""
+Properties by which team repository connections can be ordered.
+"""
+enum TeamRepositoryOrderField {
+  """
+  Order repositories by creation time
+  """
+  CREATED_AT
+
+  """
+  Order repositories by name
+  """
+  NAME
+
+  """
+  Order repositories by permission
+  """
+  PERMISSION
+
+  """
+  Order repositories by push time
+  """
+  PUSHED_AT
+
+  """
+  Order repositories by number of stargazers
+  """
+  STARGAZERS
+
+  """
+  Order repositories by update time
+  """
+  UPDATED_AT
+}
+
+"""
+The possible team review assignment algorithms
+"""
+enum TeamReviewAssignmentAlgorithm @preview(toggledBy: "stone-crop-preview") {
+  """
+  Balance review load across the entire team
+  """
+  LOAD_BALANCE
+
+  """
+  Alternate reviews between each team member
+  """
+  ROUND_ROBIN
+}
+
+"""
+The role of a user on a team.
+"""
+enum TeamRole {
+  """
+  User has admin rights on the team.
+  """
+  ADMIN
+
+  """
+  User is a member of the team.
+  """
+  MEMBER
+}
+
+"""
+A text match within a search result.
+"""
+type TextMatch {
+  """
+  The specific text fragment within the property matched on.
+  """
+  fragment: String!
+
+  """
+  Highlights within the matched fragment.
+  """
+  highlights: [TextMatchHighlight!]!
+
+  """
+  The property matched on.
+  """
+  property: String!
+}
+
+"""
+Represents a single highlight in a search result match.
+"""
+type TextMatchHighlight {
+  """
+  The index in the fragment where the matched text begins.
+  """
+  beginIndice: Int!
+
+  """
+  The index in the fragment where the matched text ends.
+  """
+  endIndice: Int!
+
+  """
+  The text matched.
+  """
+  text: String!
+}
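+
+# Illustrative only: a minimal query sketch, not part of the upstream schema,
+# reading TextMatch highlights. It assumes the `search` root field and the
+# `textMatches` field on search result edges defined elsewhere in this schema;
+# the search string is a placeholder.
+#
+# query {
+#   search(query: "datamodel in:readme", type: REPOSITORY, first: 5) {
+#     edges {
+#       textMatches {
+#         property
+#         fragment
+#         highlights { beginIndice endIndice text }
+#       }
+#     }
+#   }
+# }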
+
+"""
+The possible states of a thread subscription form action
+"""
+enum ThreadSubscriptionFormAction {
+  """
+  The User cannot subscribe or unsubscribe to the thread
+  """
+  NONE
+
+  """
+  The User can subscribe to the thread
+  """
+  SUBSCRIBE
+
+  """
+  The User can unsubscribe from the thread
+  """
+  UNSUBSCRIBE
+}
+
+"""
+The possible states of a subscription.
+"""
+enum ThreadSubscriptionState {
+  """
+  The subscription status is currently disabled.
+  """
+  DISABLED
+
+  """
+  The User is never notified because they are ignoring the list
+  """
+  IGNORING_LIST
+
+  """
+  The User is never notified because they are ignoring the thread
+  """
+  IGNORING_THREAD
+
+  """
+  The User is not receiving notifications from this thread
+  """
+  NONE
+
+  """
+  The User is notified because they are watching the list
+  """
+  SUBSCRIBED_TO_LIST
+
+  """
+  The User is notified because they are subscribed to the thread
+  """
+  SUBSCRIBED_TO_THREAD
+
+  """
+  The User is notified because they chose custom settings for this thread.
+  """
+  SUBSCRIBED_TO_THREAD_EVENTS
+
+  """
+  The User is notified because they chose custom settings for this thread.
+  """
+  SUBSCRIBED_TO_THREAD_TYPE
+
+  """
+  The subscription status is currently unavailable.
+  """
+  UNAVAILABLE
+}
+
+"""
+A topic aggregates entities that are related to a subject.
+"""
+type Topic implements Node & Starrable {
+  """
+  The Node ID of the Topic object
+  """
+  id: ID!
+
+  """
+  The topic's name.
+  """
+  name: String!
+
+  """
+  A list of related topics, including aliases of this topic, sorted with the most relevant
+  first. Returns up to 10 Topics.
+  """
+  relatedTopics(
+    """
+    How many topics to return.
+    """
+    first: Int = 3
+  ): [Topic!]!
+
+  """
+  A list of repositories.
+  """
+  repositories(
+    """
+    Array of viewer's affiliation options for repositories returned from the
+    connection. For example, OWNER will include only repositories that the
+    current viewer owns.
+    """
+    affiliations: [RepositoryAffiliation]
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    If non-null, filters repositories according to whether they have issues enabled
+    """
+    hasIssuesEnabled: Boolean
+
+    """
+    If non-null, filters repositories according to whether they have been locked
+    """
+    isLocked: Boolean
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for repositories returned from the connection
+    """
+    orderBy: RepositoryOrder
+
+    """
+    Array of owner's affiliation options for repositories returned from the
+    connection. For example, OWNER will include only repositories that the
+    organization or user being viewed owns.
+    """
+    ownerAffiliations: [RepositoryAffiliation] = [OWNER, COLLABORATOR]
+
+    """
+    If non-null, filters repositories according to privacy
+    """
+    privacy: RepositoryPrivacy
+
+    """
+    If true, only repositories whose owner can be sponsored via GitHub Sponsors will be returned.
+    """
+    sponsorableOnly: Boolean = false
+  ): RepositoryConnection!
+
+  """
+  Returns a count of how many stargazers there are on this object
+  """
+  stargazerCount: Int!
+
+  """
+  A list of users who have starred this starrable.
+  """
+  stargazers(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Order for connection
+    """
+    orderBy: StarOrder
+  ): StargazerConnection!
+
+  """
+  Returns a boolean indicating whether the viewing user has starred this starrable.
+  """
+  viewerHasStarred: Boolean!
+}
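+
+# Illustrative only: a minimal query sketch, not part of the upstream schema,
+# reading the Topic fields above. It assumes the `topic` root field defined
+# elsewhere in this schema; the topic name is a placeholder.
+#
+# query {
+#   topic(name: "graphql") {
+#     name
+#     stargazerCount
+#     viewerHasStarred
+#     relatedTopics(first: 3) { name }
+#     repositories(first: 5, orderBy: {field: STARGAZERS, direction: DESC}) {
+#       totalCount
+#       nodes { nameWithOwner }
+#     }
+#   }
+# }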
+
+"""
+Metadata for an audit entry with a topic.
+"""
+interface TopicAuditEntryData {
+  """
+  The name of the topic added to the repository
+  """
+  topic: Topic
+
+  """
+  The name of the topic added to the repository
+  """
+  topicName: String
+}
+
+"""
+Reason that the suggested topic is declined.
+"""
+enum TopicSuggestionDeclineReason {
+  """
+  The suggested topic is not relevant to the repository.
+  """
+  NOT_RELEVANT
+
+  """
+  The viewer does not like the suggested topic.
+  """
+  PERSONAL_PREFERENCE
+
+  """
+  The suggested topic is too general for the repository.
+  """
+  TOO_GENERAL
+
+  """
+  The suggested topic is too specific for the repository (e.g. #ruby-on-rails-version-4-2-1).
+  """
+  TOO_SPECIFIC
+}
+
+"""
+The possible states of a tracked issue.
+"""
+enum TrackedIssueStates {
+  """
+  The tracked issue is closed
+  """
+  CLOSED
+
+  """
+  The tracked issue is open
+  """
+  OPEN
+}
+
+"""
+Autogenerated input type of TransferEnterpriseOrganization
+"""
+input TransferEnterpriseOrganizationInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise where the organization should be transferred.
+  """
+  destinationEnterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The ID of the organization to transfer.
+  """
+  organizationId: ID! @possibleTypes(concreteTypes: ["Organization"])
+}
+
+"""
+Autogenerated return type of TransferEnterpriseOrganization
+"""
+type TransferEnterpriseOrganizationPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The organization for which a transfer was initiated.
+  """
+  organization: Organization
+}
+
+"""
+Autogenerated input type of TransferIssue
+"""
+input TransferIssueInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Whether to create labels if they don't exist in the target repository (matched by name)
+  """
+  createLabelsIfMissing: Boolean = false
+
+  """
+  The Node ID of the issue to be transferred
+  """
+  issueId: ID! @possibleTypes(concreteTypes: ["Issue"])
+
+  """
+  The Node ID of the repository the issue should be transferred to
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of TransferIssue
+"""
+type TransferIssuePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The issue that was transferred
+  """
+  issue: Issue
+}
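+
+# Example (sketch): transferring an issue, assuming the conventional
+# `transferIssue(input:)` mutation field that pairs with the types above.
+# The node IDs are hypothetical placeholders.
+#
+#   mutation {
+#     transferIssue(input: {
+#       issueId: "I_kwDOexample1"
+#       repositoryId: "R_kgDOexample2"
+#       createLabelsIfMissing: true
+#     }) {
+#       issue { id }
+#     }
+#   }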
+
+"""
+Represents a 'transferred' event on a given issue or pull request.
+"""
+type TransferredEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The repository this came from
+  """
+  fromRepository: Repository
+
+  """
+  The Node ID of the TransferredEvent object
+  """
+  id: ID!
+
+  """
+  Identifies the issue associated with the event.
+  """
+  issue: Issue!
+}
+
+"""
+Represents a Git tree.
+"""
+type Tree implements GitObject & Node {
+  """
+  An abbreviated version of the Git object ID
+  """
+  abbreviatedOid: String!
+
+  """
+  The HTTP path for this Git object
+  """
+  commitResourcePath: URI!
+
+  """
+  The HTTP URL for this Git object
+  """
+  commitUrl: URI!
+
+  """
+  A list of tree entries.
+  """
+  entries: [TreeEntry!]
+
+  """
+  The Node ID of the Tree object
+  """
+  id: ID!
+
+  """
+  The Git object ID
+  """
+  oid: GitObjectID!
+
+  """
+  The Repository the Git object belongs to
+  """
+  repository: Repository!
+}
+
+"""
+Represents a Git tree entry.
+"""
+type TreeEntry {
+  """
+  The extension of the file
+  """
+  extension: String
+
+  """
+  Whether or not this tree entry is generated
+  """
+  isGenerated: Boolean!
+
+  """
+  The programming language this file is written in.
+  """
+  language: Language
+
+  """
+  Number of lines in the file.
+  """
+  lineCount: Int
+
+  """
+  Entry file mode.
+  """
+  mode: Int!
+
+  """
+  Entry file name.
+  """
+  name: String!
+
+  """
+  Entry file name. (Base64-encoded)
+  """
+  nameRaw: Base64String!
+
+  """
+  Entry file object.
+  """
+  object: GitObject
+
+  """
+  Entry file Git object ID.
+  """
+  oid: GitObjectID!
+
+  """
+  The full path of the file.
+  """
+  path: String
+
+  """
+  The full path of the file. (Base64-encoded)
+  """
+  pathRaw: Base64String
+
+  """
+  The Repository the tree entry belongs to
+  """
+  repository: Repository!
+
+  """
+  Entry byte size
+  """
+  size: Int!
+
+  """
+  If the TreeEntry is for a directory occupied by a submodule project, this returns the corresponding submodule
+  """
+  submodule: Submodule
+
+  """
+  Entry file type.
+  """
+  type: String!
+}
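+
+# Example (sketch): reading a tree's entries with the fields defined
+# above. The `object(expression:)` field on Repository is assumed from
+# the wider schema; "main:" resolves the root tree of the main branch.
+#
+#   query {
+#     repository(owner: "octocat", name: "hello-world") {
+#       object(expression: "main:") {
+#         ... on Tree {
+#           entries { name type mode size }
+#         }
+#       }
+#     }
+#   }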
+
+"""
+An RFC 3986, RFC 3987, and RFC 6570 (level 4) compliant URI string.
+"""
+scalar URI
+
+"""
+Autogenerated input type of UnarchiveProjectV2Item
+"""
+input UnarchiveProjectV2ItemInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the ProjectV2Item to unarchive.
+  """
+  itemId: ID! @possibleTypes(concreteTypes: ["ProjectV2Item"])
+
+  """
+  The ID of the Project in which to unarchive the item.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+}
+
+"""
+Autogenerated return type of UnarchiveProjectV2Item
+"""
+type UnarchiveProjectV2ItemPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The item unarchived from the project.
+  """
+  item: ProjectV2Item
+}
+
+"""
+Autogenerated input type of UnarchiveRepository
+"""
+input UnarchiveRepositoryInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the repository to unarchive.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of UnarchiveRepository
+"""
+type UnarchiveRepositoryPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The repository that was unarchived.
+  """
+  repository: Repository
+}
+
+"""
+Represents an 'unassigned' event on any assignable object.
+"""
+type UnassignedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the assignable associated with the event.
+  """
+  assignable: Assignable!
+
+  """
+  Identifies the user or mannequin that was unassigned.
+  """
+  assignee: Assignee
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the UnassignedEvent object
+  """
+  id: ID!
+
+  """
+  Identifies the subject (user) who was unassigned.
+  """
+  user: User
+    @deprecated(reason: "Assignees can now be mannequins. Use the `assignee` field instead. Removal on 2020-01-01 UTC.")
+}
+
+"""
+Autogenerated input type of UnfollowOrganization
+"""
+input UnfollowOrganizationInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the organization to unfollow.
+  """
+  organizationId: ID! @possibleTypes(concreteTypes: ["Organization"])
+}
+
+"""
+Autogenerated return type of UnfollowOrganization
+"""
+type UnfollowOrganizationPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The organization that was unfollowed.
+  """
+  organization: Organization
+}
+
+"""
+Autogenerated input type of UnfollowUser
+"""
+input UnfollowUserInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the user to unfollow.
+  """
+  userId: ID! @possibleTypes(concreteTypes: ["User"])
+}
+
+"""
+Autogenerated return type of UnfollowUser
+"""
+type UnfollowUserPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The user that was unfollowed.
+  """
+  user: User
+}
+
+"""
+Represents a type that can be retrieved by a URL.
+"""
+interface UniformResourceLocatable {
+  """
+  The HTML path to this resource.
+  """
+  resourcePath: URI!
+
+  """
+  The URL to this resource.
+  """
+  url: URI!
+}
+
+"""
+Represents an unknown signature on a Commit or Tag.
+"""
+type UnknownSignature implements GitSignature {
+  """
+  Email used to sign this object.
+  """
+  email: String!
+
+  """
+  True if the signature is valid and verified by GitHub.
+  """
+  isValid: Boolean!
+
+  """
+  Payload for GPG signing object. Raw ODB object without the signature header.
+  """
+  payload: String!
+
+  """
+  ASCII-armored signature header from object.
+  """
+  signature: String!
+
+  """
+  GitHub user corresponding to the email signing this commit.
+  """
+  signer: User
+
+  """
+  The state of this signature. `VALID` if the signature is valid and verified by
+  GitHub; otherwise, the reason why the signature is considered invalid.
+  """
+  state: GitSignatureState!
+
+  """
+  True if the signature was made with GitHub's signing key.
+  """
+  wasSignedByGitHub: Boolean!
+}
+
+"""
+Represents an 'unlabeled' event on a given issue or pull request.
+"""
+type UnlabeledEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the UnlabeledEvent object
+  """
+  id: ID!
+
+  """
+  Identifies the label associated with the 'unlabeled' event.
+  """
+  label: Label!
+
+  """
+  Identifies the `Labelable` associated with the event.
+  """
+  labelable: Labelable!
+}
+
+"""
+Autogenerated input type of UnlinkProjectV2FromRepository
+"""
+input UnlinkProjectV2FromRepositoryInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the project to unlink from the repository.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+
+  """
+  The ID of the repository to unlink from the project.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of UnlinkProjectV2FromRepository
+"""
+type UnlinkProjectV2FromRepositoryPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The repository the project is no longer linked to.
+  """
+  repository: Repository
+}
+
+"""
+Autogenerated input type of UnlinkProjectV2FromTeam
+"""
+input UnlinkProjectV2FromTeamInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the project to unlink from the team.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+
+  """
+  The ID of the team to unlink from the project.
+  """
+  teamId: ID! @possibleTypes(concreteTypes: ["Team"])
+}
+
+"""
+Autogenerated return type of UnlinkProjectV2FromTeam
+"""
+type UnlinkProjectV2FromTeamPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The team the project is unlinked from
+  """
+  team: Team
+}
+
+"""
+Autogenerated input type of UnlinkRepositoryFromProject
+"""
+input UnlinkRepositoryFromProjectInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the Project linked to the Repository.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["Project"])
+
+  """
+  The ID of the Repository linked to the Project.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of UnlinkRepositoryFromProject
+"""
+type UnlinkRepositoryFromProjectPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The linked Project.
+  """
+  project: Project
+
+  """
+  The linked Repository.
+  """
+  repository: Repository
+}
+
+"""
+Autogenerated input type of UnlockLockable
+"""
+input UnlockLockableInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the item to be unlocked.
+  """
+  lockableId: ID! @possibleTypes(concreteTypes: ["Discussion", "Issue", "PullRequest"], abstractType: "Lockable")
+}
+
+"""
+Autogenerated return type of UnlockLockable
+"""
+type UnlockLockablePayload {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The item that was unlocked.
+  """
+  unlockedRecord: Lockable
+}
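+
+# Example (sketch): unlocking a locked issue, assuming the conventional
+# `unlockLockable(input:)` mutation field. The node ID is a placeholder;
+# per @possibleTypes it may name a Discussion, Issue, or PullRequest. The
+# `locked` field on Lockable is assumed from the wider schema.
+#
+#   mutation {
+#     unlockLockable(input: {lockableId: "I_kwDOexample3"}) {
+#       unlockedRecord { locked }
+#     }
+#   }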
+
+"""
+Represents an 'unlocked' event on a given issue or pull request.
+"""
+type UnlockedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the UnlockedEvent object
+  """
+  id: ID!
+
+  """
+  Object that was unlocked.
+  """
+  lockable: Lockable!
+}
+
+"""
+Autogenerated input type of UnmarkDiscussionCommentAsAnswer
+"""
+input UnmarkDiscussionCommentAsAnswerInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the discussion comment to unmark as an answer.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["DiscussionComment"])
+}
+
+"""
+Autogenerated return type of UnmarkDiscussionCommentAsAnswer
+"""
+type UnmarkDiscussionCommentAsAnswerPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The discussion that includes the comment.
+  """
+  discussion: Discussion
+}
+
+"""
+Autogenerated input type of UnmarkFileAsViewed
+"""
+input UnmarkFileAsViewedInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The path of the file to mark as unviewed
+  """
+  path: String!
+
+  """
+  The Node ID of the pull request.
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+}
+
+"""
+Autogenerated return type of UnmarkFileAsViewed
+"""
+type UnmarkFileAsViewedPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated pull request.
+  """
+  pullRequest: PullRequest
+}
+
+"""
+Autogenerated input type of UnmarkIssueAsDuplicate
+"""
+input UnmarkIssueAsDuplicateInput {
+  """
+  ID of the issue or pull request currently considered canonical/authoritative/original.
+  """
+  canonicalId: ID! @possibleTypes(concreteTypes: ["Issue", "PullRequest"], abstractType: "IssueOrPullRequest")
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  ID of the issue or pull request currently marked as a duplicate.
+  """
+  duplicateId: ID! @possibleTypes(concreteTypes: ["Issue", "PullRequest"], abstractType: "IssueOrPullRequest")
+}
+
+"""
+Autogenerated return type of UnmarkIssueAsDuplicate
+"""
+type UnmarkIssueAsDuplicatePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The issue or pull request that was unmarked as a duplicate.
+  """
+  duplicate: IssueOrPullRequest
+}
+
+"""
+Autogenerated input type of UnmarkProjectV2AsTemplate
+"""
+input UnmarkProjectV2AsTemplateInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the Project to unmark as a template.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+}
+
+"""
+Autogenerated return type of UnmarkProjectV2AsTemplate
+"""
+type UnmarkProjectV2AsTemplatePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The project.
+  """
+  projectV2: ProjectV2
+}
+
+"""
+Represents an 'unmarked_as_duplicate' event on a given issue or pull request.
+"""
+type UnmarkedAsDuplicateEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  The authoritative issue or pull request which has been duplicated by another.
+  """
+  canonical: IssueOrPullRequest
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The issue or pull request which has been marked as a duplicate of another.
+  """
+  duplicate: IssueOrPullRequest
+
+  """
+  The Node ID of the UnmarkedAsDuplicateEvent object
+  """
+  id: ID!
+
+  """
+  Canonical and duplicate belong to different repositories.
+  """
+  isCrossRepository: Boolean!
+}
+
+"""
+Autogenerated input type of UnminimizeComment
+"""
+input UnminimizeCommentInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the subject to modify.
+  """
+  subjectId: ID!
+    @possibleTypes(
+      concreteTypes: [
+        "CommitComment"
+        "DiscussionComment"
+        "GistComment"
+        "IssueComment"
+        "PullRequestReview"
+        "PullRequestReviewComment"
+      ]
+      abstractType: "Minimizable"
+    )
+}
+
+"""
+Autogenerated return type of UnminimizeComment
+"""
+type UnminimizeCommentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The comment that was unminimized.
+  """
+  unminimizedComment: Minimizable
+}
+
+"""
+Autogenerated input type of UnpinIssue
+"""
+input UnpinIssueInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the issue to be unpinned
+  """
+  issueId: ID! @possibleTypes(concreteTypes: ["Issue"])
+}
+
+"""
+Autogenerated return type of UnpinIssue
+"""
+type UnpinIssuePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The issue that was unpinned
+  """
+  issue: Issue
+}
+
+"""
+Represents an 'unpinned' event on a given issue or pull request.
+"""
+type UnpinnedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the UnpinnedEvent object
+  """
+  id: ID!
+
+  """
+  Identifies the issue associated with the event.
+  """
+  issue: Issue!
+}
+
+"""
+Autogenerated input type of UnresolveReviewThread
+"""
+input UnresolveReviewThreadInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the thread to unresolve
+  """
+  threadId: ID! @possibleTypes(concreteTypes: ["PullRequestReviewThread"])
+}
+
+"""
+Autogenerated return type of UnresolveReviewThread
+"""
+type UnresolveReviewThreadPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The thread that was unresolved.
+  """
+  thread: PullRequestReviewThread
+}
+
+"""
+Represents an 'unsubscribed' event on a given `Subscribable`.
+"""
+type UnsubscribedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the UnsubscribedEvent object
+  """
+  id: ID!
+
+  """
+  Object referenced by event.
+  """
+  subscribable: Subscribable!
+}
+
+"""
+Entities that can be updated.
+"""
+interface Updatable {
+  """
+  Check if the current viewer can update this object.
+  """
+  viewerCanUpdate: Boolean!
+}
+
+"""
+Comments that can be updated.
+"""
+interface UpdatableComment {
+  """
+  Reasons why the current viewer cannot update this comment.
+  """
+  viewerCannotUpdateReasons: [CommentCannotUpdateReason!]!
+}
+
+"""
+Autogenerated input type of UpdateBranchProtectionRule
+"""
+input UpdateBranchProtectionRuleInput {
+  """
+  Can this branch be deleted.
+  """
+  allowsDeletions: Boolean
+
+  """
+  Are force pushes allowed on this branch.
+  """
+  allowsForcePushes: Boolean
+
+  """
+  Is branch creation a protected operation.
+  """
+  blocksCreations: Boolean
+
+  """
+  The global relay id of the branch protection rule to be updated.
+  """
+  branchProtectionRuleId: ID! @possibleTypes(concreteTypes: ["BranchProtectionRule"])
+
+  """
+  A list of User, Team, or App IDs allowed to bypass force push targeting matching branches.
+  """
+  bypassForcePushActorIds: [ID!]
+
+  """
+  A list of User, Team, or App IDs allowed to bypass pull requests targeting matching branches.
+  """
+  bypassPullRequestActorIds: [ID!]
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Will new commits pushed to matching branches dismiss pull request review approvals.
+  """
+  dismissesStaleReviews: Boolean
+
+  """
+  Can admins overwrite branch protection.
+  """
+  isAdminEnforced: Boolean
+
+  """
+  Whether users can pull changes from upstream when the branch is locked. Set to
+  `true` to allow fork syncing. Set to `false` to prevent fork syncing.
+  """
+  lockAllowsFetchAndMerge: Boolean
+
+  """
+  Whether to set the branch as read-only. If this is true, users will not be able to push to the branch.
+  """
+  lockBranch: Boolean
+
+  """
+  The glob-like pattern used to determine matching branches.
+  """
+  pattern: String
+
+  """
+  A list of User, Team, or App IDs allowed to push to matching branches.
+  """
+  pushActorIds: [ID!]
+
+  """
+  Whether the most recent push must be approved by someone other than the person who pushed it
+  """
+  requireLastPushApproval: Boolean
+
+  """
+  Number of approving reviews required to update matching branches.
+  """
+  requiredApprovingReviewCount: Int
+
+  """
+  The list of required deployment environments
+  """
+  requiredDeploymentEnvironments: [String!]
+
+  """
+  List of required status check contexts that must pass for commits to be accepted to matching branches.
+  """
+  requiredStatusCheckContexts: [String!]
+
+  """
+  The list of required status checks
+  """
+  requiredStatusChecks: [RequiredStatusCheckInput!]
+
+  """
+  Are approving reviews required to update matching branches.
+  """
+  requiresApprovingReviews: Boolean
+
+  """
+  Are reviews from code owners required to update matching branches.
+  """
+  requiresCodeOwnerReviews: Boolean
+
+  """
+  Are commits required to be signed.
+  """
+  requiresCommitSignatures: Boolean
+
+  """
+  Are conversations required to be resolved before merging.
+  """
+  requiresConversationResolution: Boolean
+
+  """
+  Are successful deployments required before merging.
+  """
+  requiresDeployments: Boolean
+
+  """
+  Are merge commits prohibited from being pushed to this branch.
+  """
+  requiresLinearHistory: Boolean
+
+  """
+  Are status checks required to update matching branches.
+  """
+  requiresStatusChecks: Boolean
+
+  """
+  Are branches required to be up to date before merging.
+  """
+  requiresStrictStatusChecks: Boolean
+
+  """
+  Is pushing to matching branches restricted.
+  """
+  restrictsPushes: Boolean
+
+  """
+  Is dismissal of pull request reviews restricted.
+  """
+  restrictsReviewDismissals: Boolean
+
+  """
+  A list of User, Team, or App IDs allowed to dismiss reviews on pull requests targeting matching branches.
+  """
+  reviewDismissalActorIds: [ID!]
+}
+
+"""
+Autogenerated return type of UpdateBranchProtectionRule
+"""
+type UpdateBranchProtectionRulePayload {
+  """
+  The updated BranchProtectionRule.
+  """
+  branchProtectionRule: BranchProtectionRule
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
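+
+# Example (sketch): tightening review requirements on a rule, assuming the
+# conventional `updateBranchProtectionRule(input:)` mutation field. The
+# node ID is a placeholder; `pattern` on the returned rule is assumed
+# from the wider schema.
+#
+#   mutation {
+#     updateBranchProtectionRule(input: {
+#       branchProtectionRuleId: "BPR_kwDOexample4"
+#       requiresApprovingReviews: true
+#       requiredApprovingReviewCount: 2
+#       dismissesStaleReviews: true
+#     }) {
+#       branchProtectionRule { id pattern }
+#     }
+#   }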
+
+"""
+Autogenerated input type of UpdateCheckRun
+"""
+input UpdateCheckRunInput {
+  """
+  Possible further actions the integrator can perform, which a user may trigger.
+  """
+  actions: [CheckRunAction!]
+
+  """
+  The Node ID of the check run.
+  """
+  checkRunId: ID! @possibleTypes(concreteTypes: ["CheckRun"])
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The time that the check run finished.
+  """
+  completedAt: DateTime
+
+  """
+  The final conclusion of the check.
+  """
+  conclusion: CheckConclusionState
+
+  """
+  The URL of the integrator's site that has the full details of the check.
+  """
+  detailsUrl: URI
+
+  """
+  A reference for the run on the integrator's system.
+  """
+  externalId: String
+
+  """
+  The name of the check.
+  """
+  name: String
+
+  """
+  Descriptive details about the run.
+  """
+  output: CheckRunOutput
+
+  """
+  The node ID of the repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  The time that the check run began.
+  """
+  startedAt: DateTime
+
+  """
+  The current status.
+  """
+  status: RequestableCheckStatusState
+}
+
+"""
+Autogenerated return type of UpdateCheckRun
+"""
+type UpdateCheckRunPayload {
+  """
+  The updated check run.
+  """
+  checkRun: CheckRun
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of UpdateCheckSuitePreferences
+"""
+input UpdateCheckSuitePreferencesInput {
+  """
+  The check suite preferences to modify.
+  """
+  autoTriggerPreferences: [CheckSuiteAutoTriggerPreference!]!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of UpdateCheckSuitePreferences
+"""
+type UpdateCheckSuitePreferencesPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated repository.
+  """
+  repository: Repository
+}
+
+"""
+Autogenerated input type of UpdateDiscussionComment
+"""
+input UpdateDiscussionCommentInput {
+  """
+  The new contents of the comment body.
+  """
+  body: String!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the discussion comment to update.
+  """
+  commentId: ID! @possibleTypes(concreteTypes: ["DiscussionComment"])
+}
+
+"""
+Autogenerated return type of UpdateDiscussionComment
+"""
+type UpdateDiscussionCommentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The modified discussion comment.
+  """
+  comment: DiscussionComment
+}
+
+"""
+Autogenerated input type of UpdateDiscussion
+"""
+input UpdateDiscussionInput {
+  """
+  The new contents of the discussion body.
+  """
+  body: String
+
+  """
+  The Node ID of a discussion category within the same repository to change this discussion to.
+  """
+  categoryId: ID @possibleTypes(concreteTypes: ["DiscussionCategory"])
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the discussion to update.
+  """
+  discussionId: ID! @possibleTypes(concreteTypes: ["Discussion"])
+
+  """
+  The new discussion title.
+  """
+  title: String
+}
+
+"""
+Autogenerated return type of UpdateDiscussion
+"""
+type UpdateDiscussionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The modified discussion.
+  """
+  discussion: Discussion
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseAdministratorRole
+"""
+input UpdateEnterpriseAdministratorRoleInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the Enterprise which the admin belongs to.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The login of an administrator whose role is being changed.
+  """
+  login: String!
+
+  """
+  The new role for the Enterprise administrator.
+  """
+  role: EnterpriseAdministratorRole!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseAdministratorRole
+"""
+type UpdateEnterpriseAdministratorRolePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A message confirming the result of changing the administrator's role.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseAllowPrivateRepositoryForkingSetting
+"""
+input UpdateEnterpriseAllowPrivateRepositoryForkingSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to set the allow private repository forking setting.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The value for the allow private repository forking policy on the enterprise.
+  """
+  policyValue: EnterpriseAllowPrivateRepositoryForkingPolicyValue
+
+  """
+  The value for the allow private repository forking setting on the enterprise.
+  """
+  settingValue: EnterpriseEnabledDisabledSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseAllowPrivateRepositoryForkingSetting
+"""
+type UpdateEnterpriseAllowPrivateRepositoryForkingSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise with the updated allow private repository forking setting.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of updating the allow private repository forking setting.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseDefaultRepositoryPermissionSetting
+"""
+input UpdateEnterpriseDefaultRepositoryPermissionSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to set the base repository permission setting.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The value for the base repository permission setting on the enterprise.
+  """
+  settingValue: EnterpriseDefaultRepositoryPermissionSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseDefaultRepositoryPermissionSetting
+"""
+type UpdateEnterpriseDefaultRepositoryPermissionSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise with the updated base repository permission setting.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of updating the base repository permission setting.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseMembersCanChangeRepositoryVisibilitySetting
+"""
+input UpdateEnterpriseMembersCanChangeRepositoryVisibilitySettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to set the members can change repository visibility setting.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The value for the members can change repository visibility setting on the enterprise.
+  """
+  settingValue: EnterpriseEnabledDisabledSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseMembersCanChangeRepositoryVisibilitySetting
+"""
+type UpdateEnterpriseMembersCanChangeRepositoryVisibilitySettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise with the updated members can change repository visibility setting.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of updating the members can change repository visibility setting.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseMembersCanCreateRepositoriesSetting
+"""
+input UpdateEnterpriseMembersCanCreateRepositoriesSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to set the members can create repositories setting.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  Allow members to create internal repositories. Defaults to current value.
+  """
+  membersCanCreateInternalRepositories: Boolean
+
+  """
+  Allow members to create private repositories. Defaults to current value.
+  """
+  membersCanCreatePrivateRepositories: Boolean
+
+  """
+  Allow members to create public repositories. Defaults to current value.
+  """
+  membersCanCreatePublicRepositories: Boolean
+
+  """
+  When false, allow member organizations to set their own repository creation member privileges.
+  """
+  membersCanCreateRepositoriesPolicyEnabled: Boolean
+
+  """
+  Value for the members can create repositories setting on the enterprise. This
+  or the granular public/private/internal allowed fields (but not both) must be provided.
+  """
+  settingValue: EnterpriseMembersCanCreateRepositoriesSettingValue
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseMembersCanCreateRepositoriesSetting
+"""
+type UpdateEnterpriseMembersCanCreateRepositoriesSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise with the updated members can create repositories setting.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of updating the members can create repositories setting.
+  """
+  message: String
+}
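+
+# Example (sketch): using the granular fields instead of `settingValue`,
+# which the input docstring above says are mutually exclusive. Assumes the
+# conventional mutation field name; the enterprise ID is a placeholder.
+#
+#   mutation {
+#     updateEnterpriseMembersCanCreateRepositoriesSetting(input: {
+#       enterpriseId: "E_kgDOexample5"
+#       membersCanCreatePublicRepositories: false
+#       membersCanCreatePrivateRepositories: true
+#       membersCanCreateInternalRepositories: true
+#     }) {
+#       message
+#     }
+#   }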
+
+"""
+Autogenerated input type of UpdateEnterpriseMembersCanDeleteIssuesSetting
+"""
+input UpdateEnterpriseMembersCanDeleteIssuesSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to set the members can delete issues setting.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The value for the members can delete issues setting on the enterprise.
+  """
+  settingValue: EnterpriseEnabledDisabledSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseMembersCanDeleteIssuesSetting
+"""
+type UpdateEnterpriseMembersCanDeleteIssuesSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise with the updated members can delete issues setting.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of updating the members can delete issues setting.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseMembersCanDeleteRepositoriesSetting
+"""
+input UpdateEnterpriseMembersCanDeleteRepositoriesSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to set the members can delete repositories setting.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The value for the members can delete repositories setting on the enterprise.
+  """
+  settingValue: EnterpriseEnabledDisabledSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseMembersCanDeleteRepositoriesSetting
+"""
+type UpdateEnterpriseMembersCanDeleteRepositoriesSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise with the updated members can delete repositories setting.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of updating the members can delete repositories setting.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseMembersCanInviteCollaboratorsSetting
+"""
+input UpdateEnterpriseMembersCanInviteCollaboratorsSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to set the members can invite collaborators setting.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The value for the members can invite collaborators setting on the enterprise.
+  """
+  settingValue: EnterpriseEnabledDisabledSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseMembersCanInviteCollaboratorsSetting
+"""
+type UpdateEnterpriseMembersCanInviteCollaboratorsSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise with the updated members can invite collaborators setting.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of updating the members can invite collaborators setting.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseMembersCanMakePurchasesSetting
+"""
+input UpdateEnterpriseMembersCanMakePurchasesSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to set the members can make purchases setting.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The value for the members can make purchases setting on the enterprise.
+  """
+  settingValue: EnterpriseMembersCanMakePurchasesSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseMembersCanMakePurchasesSetting
+"""
+type UpdateEnterpriseMembersCanMakePurchasesSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise with the updated members can make purchases setting.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of updating the members can make purchases setting.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseMembersCanUpdateProtectedBranchesSetting
+"""
+input UpdateEnterpriseMembersCanUpdateProtectedBranchesSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to set the members can update protected branches setting.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The value for the members can update protected branches setting on the enterprise.
+  """
+  settingValue: EnterpriseEnabledDisabledSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseMembersCanUpdateProtectedBranchesSetting
+"""
+type UpdateEnterpriseMembersCanUpdateProtectedBranchesSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise with the updated members can update protected branches setting.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of updating the members can update protected branches setting.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseMembersCanViewDependencyInsightsSetting
+"""
+input UpdateEnterpriseMembersCanViewDependencyInsightsSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to set the members can view dependency insights setting.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The value for the members can view dependency insights setting on the enterprise.
+  """
+  settingValue: EnterpriseEnabledDisabledSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseMembersCanViewDependencyInsightsSetting
+"""
+type UpdateEnterpriseMembersCanViewDependencyInsightsSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise with the updated members can view dependency insights setting.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of updating the members can view dependency insights setting.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseOrganizationProjectsSetting
+"""
+input UpdateEnterpriseOrganizationProjectsSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to set the organization projects setting.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The value for the organization projects setting on the enterprise.
+  """
+  settingValue: EnterpriseEnabledDisabledSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseOrganizationProjectsSetting
+"""
+type UpdateEnterpriseOrganizationProjectsSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise with the updated organization projects setting.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of updating the organization projects setting.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseOwnerOrganizationRole
+"""
+input UpdateEnterpriseOwnerOrganizationRoleInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the Enterprise which the owner belongs to.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The ID of the organization in which the owner's role is being changed.
+  """
+  organizationId: ID! @possibleTypes(concreteTypes: ["Organization"])
+
+  """
+  The role to assume in the organization.
+  """
+  organizationRole: RoleInOrganization!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseOwnerOrganizationRole
+"""
+type UpdateEnterpriseOwnerOrganizationRolePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A message confirming the result of changing the owner's organization role.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseProfile
+"""
+input UpdateEnterpriseProfileInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The description of the enterprise.
+  """
+  description: String
+
+  """
+  The Enterprise ID to update.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The location of the enterprise.
+  """
+  location: String
+
+  """
+  The name of the enterprise.
+  """
+  name: String
+
+  """
+  The URL of the enterprise's website.
+  """
+  websiteUrl: String
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseProfile
+"""
+type UpdateEnterpriseProfilePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated enterprise.
+  """
+  enterprise: Enterprise
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseRepositoryProjectsSetting
+"""
+input UpdateEnterpriseRepositoryProjectsSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to set the repository projects setting.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The value for the repository projects setting on the enterprise.
+  """
+  settingValue: EnterpriseEnabledDisabledSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseRepositoryProjectsSetting
+"""
+type UpdateEnterpriseRepositoryProjectsSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise with the updated repository projects setting.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of updating the repository projects setting.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseTeamDiscussionsSetting
+"""
+input UpdateEnterpriseTeamDiscussionsSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to set the team discussions setting.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The value for the team discussions setting on the enterprise.
+  """
+  settingValue: EnterpriseEnabledDisabledSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseTeamDiscussionsSetting
+"""
+type UpdateEnterpriseTeamDiscussionsSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise with the updated team discussions setting.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of updating the team discussions setting.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnterpriseTwoFactorAuthenticationRequiredSetting
+"""
+input UpdateEnterpriseTwoFactorAuthenticationRequiredSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the enterprise on which to set the two factor authentication required setting.
+  """
+  enterpriseId: ID! @possibleTypes(concreteTypes: ["Enterprise"])
+
+  """
+  The value for the two factor authentication required setting on the enterprise.
+  """
+  settingValue: EnterpriseEnabledSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateEnterpriseTwoFactorAuthenticationRequiredSetting
+"""
+type UpdateEnterpriseTwoFactorAuthenticationRequiredSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The enterprise with the updated two factor authentication required setting.
+  """
+  enterprise: Enterprise
+
+  """
+  A message confirming the result of updating the two factor authentication required setting.
+  """
+  message: String
+}
+
+"""
+Autogenerated input type of UpdateEnvironment
+"""
+input UpdateEnvironmentInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The node ID of the environment.
+  """
+  environmentId: ID! @possibleTypes(concreteTypes: ["Environment"])
+
+  """
+  Whether deployments to this environment can be approved by the user who created the deployment.
+  """
+  preventSelfReview: Boolean
+
+  """
+  The IDs of users or teams that can approve deployments to this environment.
+  """
+  reviewers: [ID!]
+
+  """
+  The wait timer in minutes.
+  """
+  waitTimer: Int
+}
+
+"""
+Autogenerated return type of UpdateEnvironment
+"""
+type UpdateEnvironmentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated environment.
+  """
+  environment: Environment
+}
+
+"""
+Autogenerated input type of UpdateIpAllowListEnabledSetting
+"""
+input UpdateIpAllowListEnabledSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the owner on which to set the IP allow list enabled setting.
+  """
+  ownerId: ID! @possibleTypes(concreteTypes: ["App", "Enterprise", "Organization"], abstractType: "IpAllowListOwner")
+
+  """
+  The value for the IP allow list enabled setting.
+  """
+  settingValue: IpAllowListEnabledSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateIpAllowListEnabledSetting
+"""
+type UpdateIpAllowListEnabledSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The IP allow list owner on which the setting was updated.
+  """
+  owner: IpAllowListOwner
+}
+
+"""
+Autogenerated input type of UpdateIpAllowListEntry
+"""
+input UpdateIpAllowListEntryInput {
+  """
+  An IP address or range of addresses in CIDR notation.
+  """
+  allowListValue: String!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the IP allow list entry to update.
+  """
+  ipAllowListEntryId: ID! @possibleTypes(concreteTypes: ["IpAllowListEntry"])
+
+  """
+  Whether the IP allow list entry is active when an IP allow list is enabled.
+  """
+  isActive: Boolean!
+
+  """
+  An optional name for the IP allow list entry.
+  """
+  name: String
+}
+
+"""
+Autogenerated return type of UpdateIpAllowListEntry
+"""
+type UpdateIpAllowListEntryPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The IP allow list entry that was updated.
+  """
+  ipAllowListEntry: IpAllowListEntry
+}
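+
+# Example (sketch): updating an allow list entry to a new CIDR range,
+# assuming the conventional `updateIpAllowListEntry(input:)` mutation
+# field. The node ID is a placeholder; the selected fields on
+# IpAllowListEntry are assumed from the wider schema.
+#
+#   mutation {
+#     updateIpAllowListEntry(input: {
+#       ipAllowListEntryId: "IALE_kwDOexample6"
+#       allowListValue: "192.0.2.0/24"
+#       isActive: true
+#       name: "Office network"
+#     }) {
+#       ipAllowListEntry { allowListValue isActive }
+#     }
+#   }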
+
+"""
+Autogenerated input type of UpdateIpAllowListForInstalledAppsEnabledSetting
+"""
+input UpdateIpAllowListForInstalledAppsEnabledSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the owner.
+  """
+  ownerId: ID! @possibleTypes(concreteTypes: ["App", "Enterprise", "Organization"], abstractType: "IpAllowListOwner")
+
+  """
+  The value for the IP allow list configuration for installed GitHub Apps setting.
+  """
+  settingValue: IpAllowListForInstalledAppsEnabledSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateIpAllowListForInstalledAppsEnabledSetting
+"""
+type UpdateIpAllowListForInstalledAppsEnabledSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The IP allow list owner on which the setting was updated.
+  """
+  owner: IpAllowListOwner
+}
+
+"""
+Autogenerated input type of UpdateIssueComment
+"""
+input UpdateIssueCommentInput {
+  """
+  The updated text of the comment.
+  """
+  body: String!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the IssueComment to modify.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["IssueComment"])
+}
+
+"""
+Autogenerated return type of UpdateIssueComment
+"""
+type UpdateIssueCommentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated comment.
+  """
+  issueComment: IssueComment
+}
+
+"""
+Autogenerated input type of UpdateIssue
+"""
+input UpdateIssueInput {
+  """
+  An array of Node IDs of users for this issue.
+  """
+  assigneeIds: [ID!] @possibleTypes(concreteTypes: ["User"])
+
+  """
+  The body for the issue description.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the Issue to modify.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["Issue"])
+
+  """
+  An array of Node IDs of labels for this issue.
+  """
+  labelIds: [ID!] @possibleTypes(concreteTypes: ["Label"])
+
+  """
+  The Node ID of the milestone for this issue.
+  """
+  milestoneId: ID @possibleTypes(concreteTypes: ["Milestone"])
+
+  """
+  An array of Node IDs for projects associated with this issue.
+  """
+  projectIds: [ID!]
+
+  """
+  The desired issue state.
+  """
+  state: IssueState
+
+  """
+  The title for the issue.
+  """
+  title: String
+}
+
+"""
+Autogenerated return type of UpdateIssue
+"""
+type UpdateIssuePayload {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The issue.
+  """
+  issue: Issue
+}
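+
+# Example (sketch): closing an issue while retitling it, assuming the
+# conventional `updateIssue(input:)` mutation field. The node ID is a
+# placeholder; CLOSED is assumed to be a member of IssueState.
+#
+#   mutation {
+#     updateIssue(input: {
+#       id: "I_kwDOexample7"
+#       title: "Updated title"
+#       state: CLOSED
+#     }) {
+#       issue { id }
+#     }
+#   }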
+
+"""
+Autogenerated input type of UpdateLabel
+"""
+input UpdateLabelInput @preview(toggledBy: "bane-preview") {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A 6-character hex code, without the leading #, identifying the updated color of the label.
+  """
+  color: String
+
+  """
+  A brief description of the label, such as its purpose.
+  """
+  description: String
+
+  """
+  The Node ID of the label to be updated.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["Label"])
+
+  """
+  The updated name of the label.
+  """
+  name: String
+}
+
+"""
+Autogenerated return type of UpdateLabel
+"""
+type UpdateLabelPayload @preview(toggledBy: "bane-preview") {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated label.
+  """
+  label: Label
+}
+
+"""
+Autogenerated input type of UpdateNotificationRestrictionSetting
+"""
+input UpdateNotificationRestrictionSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the owner on which to set the restrict notifications setting.
+  """
+  ownerId: ID! @possibleTypes(concreteTypes: ["Enterprise", "Organization"], abstractType: "VerifiableDomainOwner")
+
+  """
+  The value for the restrict notifications setting.
+  """
+  settingValue: NotificationRestrictionSettingValue!
+}
+
+"""
+Autogenerated return type of UpdateNotificationRestrictionSetting
+"""
+type UpdateNotificationRestrictionSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The owner on which the setting was updated.
+  """
+  owner: VerifiableDomainOwner
+}
+
+"""
+Autogenerated input type of UpdateOrganizationAllowPrivateRepositoryForkingSetting
+"""
+input UpdateOrganizationAllowPrivateRepositoryForkingSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Enable forking of private repositories in the organization?
+  """
+  forkingEnabled: Boolean!
+
+  """
+  The ID of the organization on which to set the allow private repository forking setting.
+  """
+  organizationId: ID! @possibleTypes(concreteTypes: ["Organization"])
+}
+
+"""
+Autogenerated return type of UpdateOrganizationAllowPrivateRepositoryForkingSetting
+"""
+type UpdateOrganizationAllowPrivateRepositoryForkingSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A message confirming the result of updating the allow private repository forking setting.
+  """
+  message: String
+
+  """
+  The organization with the updated allow private repository forking setting.
+  """
+  organization: Organization
+}
+
+"""
+Autogenerated input type of UpdateOrganizationWebCommitSignoffSetting
+"""
+input UpdateOrganizationWebCommitSignoffSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the organization on which to set the web commit signoff setting.
+  """
+  organizationId: ID! @possibleTypes(concreteTypes: ["Organization"])
+
+  """
+  Enable signoff on web-based commits for repositories in the organization?
+  """
+  webCommitSignoffRequired: Boolean!
+}
+
+"""
+Autogenerated return type of UpdateOrganizationWebCommitSignoffSetting
+"""
+type UpdateOrganizationWebCommitSignoffSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A message confirming the result of updating the web commit signoff setting.
+  """
+  message: String
+
+  """
+  The organization with the updated web commit signoff setting.
+  """
+  organization: Organization
+}
+
+"""
+Only allow users with bypass permission to update matching refs.
+"""
+type UpdateParameters {
+  """
+  Branch can pull changes from its upstream repository
+  """
+  updateAllowsFetchAndMerge: Boolean!
+}
+
+"""
+Only allow users with bypass permission to update matching refs.
+"""
+input UpdateParametersInput {
+  """
+  Branch can pull changes from its upstream repository
+  """
+  updateAllowsFetchAndMerge: Boolean!
+}
+
+"""
+Autogenerated input type of UpdatePatreonSponsorability
+"""
+input UpdatePatreonSponsorabilityInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Whether Patreon tiers should be shown on the GitHub Sponsors profile page,
+  allowing potential sponsors to make their payment through Patreon instead of GitHub.
+  """
+  enablePatreonSponsorships: Boolean!
+
+  """
+  The username of the organization with the GitHub Sponsors profile, if any.
+  Defaults to the GitHub Sponsors profile for the authenticated user if omitted.
+  """
+  sponsorableLogin: String
+}
+
+"""
+Autogenerated return type of UpdatePatreonSponsorability
+"""
+type UpdatePatreonSponsorabilityPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The GitHub Sponsors profile.
+  """
+  sponsorsListing: SponsorsListing
+}
+
+"""
+Autogenerated input type of UpdateProjectCard
+"""
+input UpdateProjectCardInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Whether or not the ProjectCard should be archived
+  """
+  isArchived: Boolean
+
+  """
+  The note of the ProjectCard.
+  """
+  note: String
+
+  """
+  The ProjectCard ID to update.
+  """
+  projectCardId: ID! @possibleTypes(concreteTypes: ["ProjectCard"])
+}
+
+"""
+Autogenerated return type of UpdateProjectCard
+"""
+type UpdateProjectCardPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated ProjectCard.
+  """
+  projectCard: ProjectCard
+}
+
+"""
+Autogenerated input type of UpdateProjectColumn
+"""
+input UpdateProjectColumnInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The name of the project column.
+  """
+  name: String!
+
+  """
+  The ProjectColumn ID to update.
+  """
+  projectColumnId: ID! @possibleTypes(concreteTypes: ["ProjectColumn"])
+}
+
+"""
+Autogenerated return type of UpdateProjectColumn
+"""
+type UpdateProjectColumnPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated project column.
+  """
+  projectColumn: ProjectColumn
+}
+
+"""
+Autogenerated input type of UpdateProject
+"""
+input UpdateProjectInput {
+  """
+  The description of the project.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The name of the project.
+  """
+  name: String
+
+  """
+  The Project ID to update.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["Project"])
+
+  """
+  Whether the project is public or not.
+  """
+  public: Boolean
+
+  """
+  Whether the project is open or closed.
+  """
+  state: ProjectState
+}
+
+"""
+Autogenerated return type of UpdateProject
+"""
+type UpdateProjectPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated project.
+  """
+  project: Project
+}
+
+"""
+Autogenerated input type of UpdateProjectV2Collaborators
+"""
+input UpdateProjectV2CollaboratorsInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The collaborators to update.
+  """
+  collaborators: [ProjectV2Collaborator!]!
+
+  """
+  The ID of the project to update the collaborators for.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+}
+
+"""
+Autogenerated return type of UpdateProjectV2Collaborators
+"""
+type UpdateProjectV2CollaboratorsPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The collaborators granted a role
+  """
+  collaborators(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectV2ActorConnection
+}
+
+"""
+Autogenerated input type of UpdateProjectV2DraftIssue
+"""
+input UpdateProjectV2DraftIssueInput {
+  """
+  The IDs of the assignees of the draft issue.
+  """
+  assigneeIds: [ID!] @possibleTypes(concreteTypes: ["User"])
+
+  """
+  The body of the draft issue.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the draft issue to update.
+  """
+  draftIssueId: ID! @possibleTypes(concreteTypes: ["DraftIssue"])
+
+  """
+  The title of the draft issue.
+  """
+  title: String
+}
+
+"""
+Autogenerated return type of UpdateProjectV2DraftIssue
+"""
+type UpdateProjectV2DraftIssuePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The draft issue updated in the project.
+  """
+  draftIssue: DraftIssue
+}
+
+"""
+Autogenerated input type of UpdateProjectV2
+"""
+input UpdateProjectV2Input {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Set the project to closed or open.
+  """
+  closed: Boolean
+
+  """
+  The ID of the Project to update.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+
+  """
+  Set the project to public or private.
+  """
+  public: Boolean
+
+  """
+  Set the readme description of the project.
+  """
+  readme: String
+
+  """
+  Set the short description of the project.
+  """
+  shortDescription: String
+
+  """
+  Set the title of the project.
+  """
+  title: String
+}
+
+"""
+Autogenerated input type of UpdateProjectV2ItemFieldValue
+"""
+input UpdateProjectV2ItemFieldValueInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the field to be updated.
+  """
+  fieldId: ID!
+    @possibleTypes(
+      concreteTypes: ["ProjectV2Field", "ProjectV2IterationField", "ProjectV2SingleSelectField"]
+      abstractType: "ProjectV2FieldConfiguration"
+    )
+
+  """
+  The ID of the item to be updated.
+  """
+  itemId: ID! @possibleTypes(concreteTypes: ["ProjectV2Item"])
+
+  """
+  The ID of the Project.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+
+  """
+  The value which will be set on the field.
+  """
+  value: ProjectV2FieldValue!
+}
+
+"""
+Autogenerated return type of UpdateProjectV2ItemFieldValue
+"""
+type UpdateProjectV2ItemFieldValuePayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated item.
+  """
+  projectV2Item: ProjectV2Item
+}
+
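+# Illustrative usage (not part of the schema): setting a single-select field on a
+# ProjectV2 item. All IDs are hypothetical placeholders, and the
+# singleSelectOptionId member of ProjectV2FieldValue is assumed from the public
+# GitHub schema (it is defined outside this excerpt).
+#
+# mutation {
+#   updateProjectV2ItemFieldValue(
+#     input: {
+#       projectId: "PVT_exampleId"
+#       itemId: "PVTI_exampleId"
+#       fieldId: "PVTSSF_exampleId"
+#       value: {singleSelectOptionId: "abc123"}
+#     }
+#   ) {
+#     projectV2Item { id }
+#   }
+# }
+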
+"""
+Autogenerated input type of UpdateProjectV2ItemPosition
+"""
+input UpdateProjectV2ItemPositionInput {
+  """
+  The ID of the item to position this item after. If omitted or set to null, the item will be moved to the top.
+  """
+  afterId: ID @possibleTypes(concreteTypes: ["ProjectV2Item"])
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the item to be moved.
+  """
+  itemId: ID! @possibleTypes(concreteTypes: ["ProjectV2Item"])
+
+  """
+  The ID of the Project.
+  """
+  projectId: ID! @possibleTypes(concreteTypes: ["ProjectV2"])
+}
+
+"""
+Autogenerated return type of UpdateProjectV2ItemPosition
+"""
+type UpdateProjectV2ItemPositionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The items in the new order
+  """
+  items(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectV2ItemConnection
+}
+
+"""
+Autogenerated return type of UpdateProjectV2
+"""
+type UpdateProjectV2Payload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated Project.
+  """
+  projectV2: ProjectV2
+}
+
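+# Illustrative usage (not part of the schema): closing a ProjectV2 while updating
+# its short description. The project ID is a hypothetical placeholder.
+#
+# mutation {
+#   updateProjectV2(
+#     input: {projectId: "PVT_exampleId", closed: true, shortDescription: "Archived board"}
+#   ) {
+#     projectV2 { title closed }
+#   }
+# }
+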
+"""
+Autogenerated input type of UpdatePullRequestBranch
+"""
+input UpdatePullRequestBranchInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The head ref oid for the upstream branch.
+  """
+  expectedHeadOid: GitObjectID
+
+  """
+  The Node ID of the pull request.
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+
+  """
+  The update branch method to use. If omitted, defaults to 'MERGE'
+  """
+  updateMethod: PullRequestBranchUpdateMethod
+}
+
+"""
+Autogenerated return type of UpdatePullRequestBranch
+"""
+type UpdatePullRequestBranchPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated pull request.
+  """
+  pullRequest: PullRequest
+}
+
+"""
+Autogenerated input type of UpdatePullRequest
+"""
+input UpdatePullRequestInput {
+  """
+  An array of Node IDs of users for this pull request.
+  """
+  assigneeIds: [ID!] @possibleTypes(concreteTypes: ["User"])
+
+  """
+  The name of the branch you want your changes pulled into. This should be an existing branch
+  on the current repository.
+  """
+  baseRefName: String
+
+  """
+  The contents of the pull request.
+  """
+  body: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  An array of Node IDs of labels for this pull request.
+  """
+  labelIds: [ID!] @possibleTypes(concreteTypes: ["Label"])
+
+  """
+  Indicates whether maintainers can modify the pull request.
+  """
+  maintainerCanModify: Boolean
+
+  """
+  The Node ID of the milestone for this pull request.
+  """
+  milestoneId: ID @possibleTypes(concreteTypes: ["Milestone"])
+
+  """
+  An array of Node IDs for projects associated with this pull request.
+  """
+  projectIds: [ID!]
+
+  """
+  The Node ID of the pull request.
+  """
+  pullRequestId: ID! @possibleTypes(concreteTypes: ["PullRequest"])
+
+  """
+  The target state of the pull request.
+  """
+  state: PullRequestUpdateState
+
+  """
+  The title of the pull request.
+  """
+  title: String
+}
+
+"""
+Autogenerated return type of UpdatePullRequest
+"""
+type UpdatePullRequestPayload {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated pull request.
+  """
+  pullRequest: PullRequest
+}
+
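+# Illustrative usage (not part of the schema): retitling a pull request and
+# retargeting it at a different base branch. The node ID is a hypothetical
+# placeholder.
+#
+# mutation {
+#   updatePullRequest(
+#     input: {pullRequestId: "PR_exampleId", title: "Fix flaky test", baseRefName: "main"}
+#   ) {
+#     pullRequest { number title baseRefName }
+#   }
+# }
+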
+"""
+Autogenerated input type of UpdatePullRequestReviewComment
+"""
+input UpdatePullRequestReviewCommentInput {
+  """
+  The text of the comment.
+  """
+  body: String!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the comment to modify.
+  """
+  pullRequestReviewCommentId: ID! @possibleTypes(concreteTypes: ["PullRequestReviewComment"])
+}
+
+"""
+Autogenerated return type of UpdatePullRequestReviewComment
+"""
+type UpdatePullRequestReviewCommentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated comment.
+  """
+  pullRequestReviewComment: PullRequestReviewComment
+}
+
+"""
+Autogenerated input type of UpdatePullRequestReview
+"""
+input UpdatePullRequestReviewInput {
+  """
+  The contents of the pull request review body.
+  """
+  body: String!
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the pull request review to modify.
+  """
+  pullRequestReviewId: ID! @possibleTypes(concreteTypes: ["PullRequestReview"])
+}
+
+"""
+Autogenerated return type of UpdatePullRequestReview
+"""
+type UpdatePullRequestReviewPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated pull request review.
+  """
+  pullRequestReview: PullRequestReview
+}
+
+"""
+Autogenerated input type of UpdateRef
+"""
+input UpdateRefInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Permit updates of branch Refs that are not fast-forwards?
+  """
+  force: Boolean = false
+
+  """
+  The GitObjectID that the Ref shall be updated to target.
+  """
+  oid: GitObjectID!
+
+  """
+  The Node ID of the Ref to be updated.
+  """
+  refId: ID! @possibleTypes(concreteTypes: ["Ref"])
+}
+
+"""
+Autogenerated return type of UpdateRef
+"""
+type UpdateRefPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated Ref.
+  """
+  ref: Ref
+}
+
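+# Illustrative usage (not part of the schema): force-updating a branch ref to a
+# new target commit; force: true permits the non-fast-forward update described
+# on UpdateRefInput above. Both IDs are hypothetical placeholders.
+#
+# mutation {
+#   updateRef(
+#     input: {refId: "REF_exampleId", oid: "1a2b3c4d5e6f7a8b9c0d1e2f3a4b5c6d7e8f9a0b", force: true}
+#   ) {
+#     ref { name }
+#   }
+# }
+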
+"""
+Autogenerated input type of UpdateRefs
+"""
+input UpdateRefsInput @preview(toggledBy: "update-refs-preview") {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A list of ref updates.
+  """
+  refUpdates: [RefUpdate!]!
+
+  """
+  The Node ID of the repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+}
+
+"""
+Autogenerated return type of UpdateRefs
+"""
+type UpdateRefsPayload @preview(toggledBy: "update-refs-preview") {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+}
+
+"""
+Autogenerated input type of UpdateRepository
+"""
+input UpdateRepositoryInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A new description for the repository. Pass an empty string to erase the existing description.
+  """
+  description: String
+
+  """
+  Indicates if the repository should have the discussions feature enabled.
+  """
+  hasDiscussionsEnabled: Boolean
+
+  """
+  Indicates if the repository should have the issues feature enabled.
+  """
+  hasIssuesEnabled: Boolean
+
+  """
+  Indicates if the repository should have the project boards feature enabled.
+  """
+  hasProjectsEnabled: Boolean
+
+  """
+  Indicates if the repository should have the wiki feature enabled.
+  """
+  hasWikiEnabled: Boolean
+
+  """
+  The URL for a web page about this repository. Pass an empty string to erase the existing URL.
+  """
+  homepageUrl: URI
+
+  """
+  The new name of the repository.
+  """
+  name: String
+
+  """
+  The ID of the repository to update.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  Whether this repository should be marked as a template such that anyone who
+  can access it can create new repositories with the same files and directory structure.
+  """
+  template: Boolean
+}
+
+"""
+Autogenerated return type of UpdateRepository
+"""
+type UpdateRepositoryPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated repository.
+  """
+  repository: Repository
+}
+
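+# Illustrative usage (not part of the schema): renaming a repository and enabling
+# the discussions feature in one call. The repository ID is a hypothetical
+# placeholder.
+#
+# mutation {
+#   updateRepository(
+#     input: {repositoryId: "R_exampleId", name: "new-name", hasDiscussionsEnabled: true}
+#   ) {
+#     repository { nameWithOwner hasDiscussionsEnabled }
+#   }
+# }
+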
+"""
+Autogenerated input type of UpdateRepositoryRuleset
+"""
+input UpdateRepositoryRulesetInput {
+  """
+  A list of actors that are allowed to bypass rules in this ruleset.
+  """
+  bypassActors: [RepositoryRulesetBypassActorInput!]
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The list of conditions for this ruleset
+  """
+  conditions: RepositoryRuleConditionsInput
+
+  """
+  The enforcement level for this ruleset
+  """
+  enforcement: RuleEnforcement
+
+  """
+  The name of the ruleset.
+  """
+  name: String
+
+  """
+  The global relay id of the repository ruleset to be updated.
+  """
+  repositoryRulesetId: ID! @possibleTypes(concreteTypes: ["RepositoryRuleset"])
+
+  """
+  The list of rules for this ruleset
+  """
+  rules: [RepositoryRuleInput!]
+
+  """
+  The target of the ruleset.
+  """
+  target: RepositoryRulesetTarget
+}
+
+"""
+Autogenerated return type of UpdateRepositoryRuleset
+"""
+type UpdateRepositoryRulesetPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The newly created Ruleset.
+  """
+  ruleset: RepositoryRuleset
+}
+
+"""
+Autogenerated input type of UpdateRepositoryWebCommitSignoffSetting
+"""
+input UpdateRepositoryWebCommitSignoffSettingInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the repository to update.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  Indicates if the repository should require signoff on web-based commits.
+  """
+  webCommitSignoffRequired: Boolean!
+}
+
+"""
+Autogenerated return type of UpdateRepositoryWebCommitSignoffSetting
+"""
+type UpdateRepositoryWebCommitSignoffSettingPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  A message confirming the result of updating the web commit signoff setting.
+  """
+  message: String
+
+  """
+  The updated repository.
+  """
+  repository: Repository
+}
+
+"""
+Autogenerated input type of UpdateSponsorshipPreferences
+"""
+input UpdateSponsorshipPreferencesInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Specify whether others should be able to see that the sponsor is sponsoring
+  the sponsorable. Public visibility still does not reveal which tier is used.
+  """
+  privacyLevel: SponsorshipPrivacy = PUBLIC
+
+  """
+  Whether the sponsor should receive email updates from the sponsorable.
+  """
+  receiveEmails: Boolean = true
+
+  """
+  The ID of the user or organization who is acting as the sponsor, paying for
+  the sponsorship. Required if sponsorLogin is not given.
+  """
+  sponsorId: ID @possibleTypes(concreteTypes: ["Organization", "User"], abstractType: "Sponsor")
+
+  """
+  The username of the user or organization who is acting as the sponsor, paying
+  for the sponsorship. Required if sponsorId is not given.
+  """
+  sponsorLogin: String
+
+  """
+  The ID of the user or organization who is receiving the sponsorship. Required if sponsorableLogin is not given.
+  """
+  sponsorableId: ID @possibleTypes(concreteTypes: ["Organization", "User"], abstractType: "Sponsorable")
+
+  """
+  The username of the user or organization who is receiving the sponsorship. Required if sponsorableId is not given.
+  """
+  sponsorableLogin: String
+}
+
+"""
+Autogenerated return type of UpdateSponsorshipPreferences
+"""
+type UpdateSponsorshipPreferencesPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The sponsorship that was updated.
+  """
+  sponsorship: Sponsorship
+}
+
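+# Illustrative usage (not part of the schema): making a sponsorship private and
+# opting out of email updates. Logins are hypothetical placeholders; note that
+# one of sponsorId/sponsorLogin must identify the paying account, per the field
+# docs above. The PRIVATE value is assumed from SponsorshipPrivacy, which is
+# defined outside this excerpt.
+#
+# mutation {
+#   updateSponsorshipPreferences(
+#     input: {
+#       sponsorLogin: "sponsoring-user"
+#       sponsorableLogin: "octocat"
+#       privacyLevel: PRIVATE
+#       receiveEmails: false
+#     }
+#   ) {
+#     sponsorship { privacyLevel }
+#   }
+# }
+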
+"""
+Autogenerated input type of UpdateSubscription
+"""
+input UpdateSubscriptionInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The new state of the subscription.
+  """
+  state: SubscriptionState!
+
+  """
+  The Node ID of the subscribable object to modify.
+  """
+  subscribableId: ID!
+    @possibleTypes(
+      concreteTypes: ["Commit", "Discussion", "Issue", "PullRequest", "Repository", "Team", "TeamDiscussion"]
+      abstractType: "Subscribable"
+    )
+}
+
+"""
+Autogenerated return type of UpdateSubscription
+"""
+type UpdateSubscriptionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The input subscribable entity.
+  """
+  subscribable: Subscribable
+}
+
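+# Illustrative usage (not part of the schema): unsubscribing from a repository's
+# notifications. The node ID is a hypothetical placeholder, and the UNSUBSCRIBED
+# value is assumed from SubscriptionState, which is defined outside this excerpt.
+#
+# mutation {
+#   updateSubscription(input: {subscribableId: "R_exampleId", state: UNSUBSCRIBED}) {
+#     subscribable { viewerSubscription }
+#   }
+# }
+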
+"""
+Autogenerated input type of UpdateTeamDiscussionComment
+"""
+input UpdateTeamDiscussionCommentInput {
+  """
+  The updated text of the comment.
+  """
+  body: String!
+
+  """
+  The current version of the body content.
+  """
+  bodyVersion: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the comment to modify.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["TeamDiscussionComment"])
+}
+
+"""
+Autogenerated return type of UpdateTeamDiscussionComment
+"""
+type UpdateTeamDiscussionCommentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated comment.
+  """
+  teamDiscussionComment: TeamDiscussionComment
+}
+
+"""
+Autogenerated input type of UpdateTeamDiscussion
+"""
+input UpdateTeamDiscussionInput {
+  """
+  The updated text of the discussion.
+  """
+  body: String
+
+  """
+  The current version of the body content. If provided, this update operation
+  will be rejected if the given version does not match the latest version on the server.
+  """
+  bodyVersion: String
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the discussion to modify.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["TeamDiscussion"])
+
+  """
+  If provided, sets the pinned state of the updated discussion.
+  """
+  pinned: Boolean
+
+  """
+  The updated title of the discussion.
+  """
+  title: String
+}
+
+"""
+Autogenerated return type of UpdateTeamDiscussion
+"""
+type UpdateTeamDiscussionPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The updated discussion.
+  """
+  teamDiscussion: TeamDiscussion
+}
+
+"""
+Autogenerated input type of UpdateTeamReviewAssignment
+"""
+input UpdateTeamReviewAssignmentInput @preview(toggledBy: "stone-crop-preview") {
+  """
+  The algorithm to use for review assignment
+  """
+  algorithm: TeamReviewAssignmentAlgorithm = ROUND_ROBIN
+
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Turn on or off review assignment
+  """
+  enabled: Boolean!
+
+  """
+  An array of team member IDs to exclude
+  """
+  excludedTeamMemberIds: [ID!] @possibleTypes(concreteTypes: ["User"])
+
+  """
+  The Node ID of the team to update review assignments of
+  """
+  id: ID! @possibleTypes(concreteTypes: ["Team"])
+
+  """
+  Notify the entire team of the PR if it is delegated
+  """
+  notifyTeam: Boolean = true
+
+  """
+  The number of team members to assign
+  """
+  teamMemberCount: Int = 1
+}
+
+"""
+Autogenerated return type of UpdateTeamReviewAssignment
+"""
+type UpdateTeamReviewAssignmentPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The team that was modified
+  """
+  team: Team
+}
+
+"""
+Autogenerated input type of UpdateTeamsRepository
+"""
+input UpdateTeamsRepositoryInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Permission that should be granted to the teams.
+  """
+  permission: RepositoryPermission!
+
+  """
+  Repository ID being granted access to.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  A list of teams being granted access. Limit: 10
+  """
+  teamIds: [ID!]! @possibleTypes(concreteTypes: ["Team"])
+}
+
+"""
+Autogenerated return type of UpdateTeamsRepository
+"""
+type UpdateTeamsRepositoryPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The repository that was updated.
+  """
+  repository: Repository
+
+  """
+  The teams granted permission on the repository.
+  """
+  teams: [Team!]
+}
+
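+# Illustrative usage (not part of the schema): granting two teams write access to
+# a repository in one call (at most 10 team IDs, per the docs above). IDs are
+# hypothetical placeholders and WRITE is assumed from RepositoryPermission,
+# which is defined outside this excerpt.
+#
+# mutation {
+#   updateTeamsRepository(
+#     input: {repositoryId: "R_exampleId", teamIds: ["T_one", "T_two"], permission: WRITE}
+#   ) {
+#     teams { name }
+#   }
+# }
+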
+"""
+Autogenerated input type of UpdateTopics
+"""
+input UpdateTopicsInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The Node ID of the repository.
+  """
+  repositoryId: ID! @possibleTypes(concreteTypes: ["Repository"])
+
+  """
+  An array of topic names.
+  """
+  topicNames: [String!]!
+}
+
+"""
+Autogenerated return type of UpdateTopics
+"""
+type UpdateTopicsPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  Names of the provided topics that are not valid.
+  """
+  invalidTopicNames: [String!]
+
+  """
+  The updated repository.
+  """
+  repository: Repository
+}
+
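+# Illustrative usage (not part of the schema): applying a set of topics to a
+# repository; any rejected names come back in invalidTopicNames. The repository
+# ID is a hypothetical placeholder.
+#
+# mutation {
+#   updateTopics(input: {repositoryId: "R_exampleId", topicNames: ["graphql", "api"]}) {
+#     invalidTopicNames
+#     repository { name }
+#   }
+# }
+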
+"""
+A user is an individual's account on GitHub that owns repositories and can make new content.
+"""
+type User implements Actor & Node & PackageOwner & ProfileOwner & ProjectOwner & ProjectV2Owner & ProjectV2Recent & RepositoryDiscussionAuthor & RepositoryDiscussionCommentAuthor & RepositoryOwner & Sponsorable & UniformResourceLocatable {
+  """
+  Determine if this repository owner has any items that can be pinned to their profile.
+  """
+  anyPinnableItems(
+    """
+    Filter to only a particular kind of pinnable item.
+    """
+    type: PinnableItemType
+  ): Boolean!
+
+  """
+  A URL pointing to the user's public avatar.
+  """
+  avatarUrl(
+    """
+    The size of the resulting square image.
+    """
+    size: Int
+  ): URI!
+
+  """
+  The user's public profile bio.
+  """
+  bio: String
+
+  """
+  The user's public profile bio as HTML.
+  """
+  bioHTML: HTML!
+
+  """
+  Could this user receive email notifications, if the organization had notification restrictions enabled?
+  """
+  canReceiveOrganizationEmailsWhenNotificationsRestricted(
+    """
+    The login of the organization to check.
+    """
+    login: String!
+  ): Boolean!
+
+  """
+  A list of commit comments made by this user.
+  """
+  commitComments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): CommitCommentConnection!
+
+  """
+  The user's public profile company.
+  """
+  company: String
+
+  """
+  The user's public profile company as HTML.
+  """
+  companyHTML: HTML!
+
+  """
+  The collection of contributions this user has made to different repositories.
+  """
+  contributionsCollection(
+    """
+    Only contributions made at this time or later will be counted. If omitted, defaults to a year ago.
+    """
+    from: DateTime
+
+    """
+    The ID of the organization used to filter contributions.
+    """
+    organizationID: ID
+
+    """
+    Only contributions made before and up to (including) this time will be
+    counted. If omitted, defaults to the current time or one year from the
+    provided from argument.
+    """
+    to: DateTime
+  ): ContributionsCollection!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The user's publicly visible profile email.
+  """
+  email: String!
+
+  """
+  A list of enterprises that the user belongs to.
+  """
+  enterprises(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter enterprises returned based on the user's membership type.
+    """
+    membershipType: EnterpriseMembershipType = ALL
+
+    """
+    Ordering options for the User's enterprises.
+    """
+    orderBy: EnterpriseOrder = {field: NAME, direction: ASC}
+  ): EnterpriseConnection
+
+  """
+  The estimated next GitHub Sponsors payout for this user/organization in cents (USD).
+  """
+  estimatedNextSponsorsPayoutInCents: Int!
+
+  """
+  A list of users the given user is followed by.
+  """
+  followers(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): FollowerConnection!
+
+  """
+  A list of users the given user is following.
+  """
+  following(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): FollowingConnection!
+
+  """
+  Find gist by repo name.
+  """
+  gist(
+    """
+    The gist name to find.
+    """
+    name: String!
+  ): Gist
+
+  """
+  A list of gist comments made by this user.
+  """
+  gistComments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): GistCommentConnection!
+
+  """
+  A list of the Gists the user has created.
+  """
+  gists(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for gists returned from the connection
+    """
+    orderBy: GistOrder
+
+    """
+    Filters Gists according to privacy.
+    """
+    privacy: GistPrivacy
+  ): GistConnection!
+
+  """
+  True if this user/organization has a GitHub Sponsors listing.
+  """
+  hasSponsorsListing: Boolean!
+
+  """
+  The hovercard information for this user in a given context
+  """
+  hovercard(
+    """
+    The ID of the subject to get the hovercard in the context of
+    """
+    primarySubjectId: ID
+  ): Hovercard!
+
+  """
+  The Node ID of the User object
+  """
+  id: ID!
+
+  """
+  The interaction ability settings for this user.
+  """
+  interactionAbility: RepositoryInteractionAbility
+
+  """
+  Whether or not this user is a participant in the GitHub Security Bug Bounty.
+  """
+  isBountyHunter: Boolean!
+
+  """
+  Whether or not this user is a participant in the GitHub Campus Experts Program.
+  """
+  isCampusExpert: Boolean!
+
+  """
+  Whether or not this user is a GitHub Developer Program member.
+  """
+  isDeveloperProgramMember: Boolean!
+
+  """
+  Whether or not this user is a GitHub employee.
+  """
+  isEmployee: Boolean!
+
+  """
+  Whether or not this user is following the viewer. Inverse of viewerIsFollowing
+  """
+  isFollowingViewer: Boolean!
+
+  """
+  Whether or not this user is a member of the GitHub Stars Program.
+  """
+  isGitHubStar: Boolean!
+
+  """
+  Whether or not the user has marked themselves as for hire.
+  """
+  isHireable: Boolean!
+
+  """
+  Whether or not this user is a site administrator.
+  """
+  isSiteAdmin: Boolean!
+
+  """
+  Whether the given account is sponsoring this user/organization.
+  """
+  isSponsoredBy(
+    """
+    The target account's login.
+    """
+    accountLogin: String!
+  ): Boolean!
+
+  """
+  True if the viewer is sponsored by this user/organization.
+  """
+  isSponsoringViewer: Boolean!
+
+  """
+  Whether or not this user is the viewing user.
+  """
+  isViewer: Boolean!
+
+  """
+  A list of issue comments made by this user.
+  """
+  issueComments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for issue comments returned from the connection.
+    """
+    orderBy: IssueCommentOrder
+  ): IssueCommentConnection!
+
+  """
+  A list of issues associated with this user.
+  """
+  issues(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Filtering options for issues returned from the connection.
+    """
+    filterBy: IssueFilters
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    A list of label names to filter the pull requests by.
+    """
+    labels: [String!]
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for issues returned from the connection.
+    """
+    orderBy: IssueOrder
+
+    """
+    A list of states to filter the issues by.
+    """
+    states: [IssueState!]
+  ): IssueConnection!
+
+  """
+  Showcases a selection of repositories and gists that the profile owner has
+  either curated or that have been selected automatically based on popularity.
+  """
+  itemShowcase: ProfileItemShowcase!
+
+  """
+  The user's public profile location.
+  """
+  location: String
+
+  """
+  The username used to log in.
+  """
+  login: String!
+
+  """
+  The estimated monthly GitHub Sponsors income for this user/organization in cents (USD).
+  """
+  monthlyEstimatedSponsorsIncomeInCents: Int!
+
+  """
+  The user's public profile name.
+  """
+  name: String
+
+  """
+  Find an organization by its login that the user belongs to.
+  """
+  organization(
+    """
+    The login of the organization to find.
+    """
+    login: String!
+  ): Organization
+
+  """
+  Verified email addresses that match verified domains for a specified organization the user is a member of.
+  """
+  organizationVerifiedDomainEmails(
+    """
+    The login of the organization to match verified domains from.
+    """
+    login: String!
+  ): [String!]!
+
+  """
+  A list of organizations the user belongs to.
+  """
+  organizations(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the User's organizations.
+    """
+    orderBy: OrganizationOrder = null
+  ): OrganizationConnection!
+
+  """
+  A list of packages under the owner.
+  """
+  packages(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Find packages by their names.
+    """
+    names: [String]
+
+    """
+    Ordering of the returned packages.
+    """
+    orderBy: PackageOrder = {field: CREATED_AT, direction: DESC}
+
+    """
+    Filter registry package by type.
+    """
+    packageType: PackageType
+
+    """
+    Find packages in a repository by ID.
+    """
+    repositoryId: ID
+  ): PackageConnection!
+
+  """
+  A list of repositories and gists this profile owner can pin to their profile.
+  """
+  pinnableItems(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter the types of pinnable items that are returned.
+    """
+    types: [PinnableItemType!]
+  ): PinnableItemConnection!
+
+  """
+  A list of repositories and gists this profile owner has pinned to their profile
+  """
+  pinnedItems(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter the types of pinned items that are returned.
+    """
+    types: [PinnableItemType!]
+  ): PinnableItemConnection!
+
+  """
+  Returns how many more items this profile owner can pin to their profile.
+  """
+  pinnedItemsRemaining: Int!
+
+  """
+  Find project by number.
+  """
+  project(
+    """
+    The project number to find.
+    """
+    number: Int!
+  ): Project
+
+  """
+  Find a project by number.
+  """
+  projectV2(
+    """
+    The project number.
+    """
+    number: Int!
+  ): ProjectV2
+
+  """
+  A list of projects under the owner.
+  """
+  projects(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for projects returned from the connection
+    """
+    orderBy: ProjectOrder
+
+    """
+    Query to search projects by, currently only searching by name.
+    """
+    search: String
+
+    """
+    A list of states to filter the projects by.
+    """
+    states: [ProjectState!]
+  ): ProjectConnection!
+
+  """
+  The HTTP path listing user's projects
+  """
+  projectsResourcePath: URI!
+
+  """
+  The HTTP URL listing user's projects
+  """
+  projectsUrl: URI!
+
+  """
+  A list of projects under the owner.
+  """
+  projectsV2(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    How to order the returned projects.
+    """
+    orderBy: ProjectV2Order = {field: NUMBER, direction: DESC}
+
+    """
+    A project to search for under the owner.
+    """
+    query: String
+  ): ProjectV2Connection!
+
+  """
+  The user's profile pronouns
+  """
+  pronouns: String
+
+  """
+  A list of public keys associated with this user.
+  """
+  publicKeys(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): PublicKeyConnection!
+
+  """
+  A list of pull requests associated with this user.
+  """
+  pullRequests(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    The base ref name to filter the pull requests by.
+    """
+    baseRefName: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    The head ref name to filter the pull requests by.
+    """
+    headRefName: String
+
+    """
+    A list of label names to filter the pull requests by.
+    """
+    labels: [String!]
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for pull requests returned from the connection.
+    """
+    orderBy: IssueOrder
+
+    """
+    A list of states to filter the pull requests by.
+    """
+    states: [PullRequestState!]
+  ): PullRequestConnection!
+
+  """
+  Recent projects that this user has modified in the context of the owner.
+  """
+  recentProjects(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): ProjectV2Connection!
+
+  """
+  A list of repositories that the user owns.
+  """
+  repositories(
+    """
+    Array of viewer's affiliation options for repositories returned from the
+    connection. For example, OWNER will include only repositories that the
+    current viewer owns.
+    """
+    affiliations: [RepositoryAffiliation]
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    If non-null, filters repositories according to whether they have issues enabled
+    """
+    hasIssuesEnabled: Boolean
+
+    """
+    If non-null, filters repositories according to whether they are archived and not maintained
+    """
+    isArchived: Boolean
+
+    """
+    If non-null, filters repositories according to whether they are forks of another repository
+    """
+    isFork: Boolean
+
+    """
+    If non-null, filters repositories according to whether they have been locked
+    """
+    isLocked: Boolean
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for repositories returned from the connection
+    """
+    orderBy: RepositoryOrder
+
+    """
+    Array of owner's affiliation options for repositories returned from the
+    connection. For example, OWNER will include only repositories that the
+    organization or user being viewed owns.
+    """
+    ownerAffiliations: [RepositoryAffiliation] = [OWNER, COLLABORATOR]
+
+    """
+    If non-null, filters repositories according to privacy
+    """
+    privacy: RepositoryPrivacy
+  ): RepositoryConnection!
+
+  """
+  A list of repositories that the user recently contributed to.
+  """
+  repositoriesContributedTo(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    If non-null, include only the specified types of contributions. The
+    GitHub.com UI uses [COMMIT, ISSUE, PULL_REQUEST, REPOSITORY]
+    """
+    contributionTypes: [RepositoryContributionType]
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    If non-null, filters repositories according to whether they have issues enabled
+    """
+    hasIssues: Boolean
+
+    """
+    If true, include user repositories
+    """
+    includeUserRepositories: Boolean
+
+    """
+    If non-null, filters repositories according to whether they have been locked
+    """
+    isLocked: Boolean
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for repositories returned from the connection
+    """
+    orderBy: RepositoryOrder
+
+    """
+    If non-null, filters repositories according to privacy
+    """
+    privacy: RepositoryPrivacy
+  ): RepositoryConnection!
+
+  """
+  Find Repository.
+  """
+  repository(
+    """
+    Follow repository renames. If disabled, a repository referenced by its old name will return an error.
+    """
+    followRenames: Boolean = true
+
+    """
+    Name of Repository to find.
+    """
+    name: String!
+  ): Repository
+
+  """
+  Discussion comments this user has authored.
+  """
+  repositoryDiscussionComments(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter discussion comments to only those that were marked as the answer
+    """
+    onlyAnswers: Boolean = false
+
+    """
+    Filter discussion comments to only those in a specific repository.
+    """
+    repositoryId: ID
+  ): DiscussionCommentConnection!
+
+  """
+  Discussions this user has started.
+  """
+  repositoryDiscussions(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Filter discussions to only those that have been answered or not. Defaults to
+    including both answered and unanswered discussions.
+    """
+    answered: Boolean = null
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for discussions returned from the connection.
+    """
+    orderBy: DiscussionOrder = {field: CREATED_AT, direction: DESC}
+
+    """
+    Filter discussions to only those in a specific repository.
+    """
+    repositoryId: ID
+
+    """
+    A list of states to filter the discussions by.
+    """
+    states: [DiscussionState!] = []
+  ): DiscussionConnection!
+
+  """
+  The HTTP path for this user
+  """
+  resourcePath: URI!
+
+  """
+  Replies this user has saved
+  """
+  savedReplies(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    The field to order saved replies by.
+    """
+    orderBy: SavedReplyOrder = {field: UPDATED_AT, direction: DESC}
+  ): SavedReplyConnection
+
+  """
+  The user's social media accounts, ordered as they appear on the user's profile.
+  """
+  socialAccounts(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): SocialAccountConnection!
+
+  """
+  List of users and organizations this entity is sponsoring.
+  """
+  sponsoring(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the users and organizations returned from the connection.
+    """
+    orderBy: SponsorOrder = {field: RELEVANCE, direction: DESC}
+  ): SponsorConnection!
+
+  """
+  List of sponsors for this user or organization.
+  """
+  sponsors(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for sponsors returned from the connection.
+    """
+    orderBy: SponsorOrder = {field: RELEVANCE, direction: DESC}
+
+    """
+    If given, will filter for sponsors at the given tier. Will only return
+    sponsors whose tier the viewer is permitted to see.
+    """
+    tierId: ID
+  ): SponsorConnection!
+
+  """
+  Events involving this sponsorable, such as new sponsorships.
+  """
+  sponsorsActivities(
+    """
+    Filter activities to only the specified actions.
+    """
+    actions: [SponsorsActivityAction!] = []
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Whether to include those events where this sponsorable acted as the sponsor.
+    Defaults to only including events where this sponsorable was the recipient
+    of a sponsorship.
+    """
+    includeAsSponsor: Boolean = false
+
+    """
+    Whether or not to include private activities in the result set. Defaults to including public and private activities.
+    """
+    includePrivate: Boolean = true
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for activity returned from the connection.
+    """
+    orderBy: SponsorsActivityOrder = {field: TIMESTAMP, direction: DESC}
+
+    """
+    Filter activities returned to only those that occurred in the most recent
+    specified time period. Set to ALL to avoid filtering by when the activity
+    occurred. Will be ignored if `since` or `until` is given.
+    """
+    period: SponsorsActivityPeriod = MONTH
+
+    """
+    Filter activities to those that occurred on or after this time.
+    """
+    since: DateTime
+
+    """
+    Filter activities to those that occurred before this time.
+    """
+    until: DateTime
+  ): SponsorsActivityConnection!
+
+  """
+  The GitHub Sponsors listing for this user or organization.
+  """
+  sponsorsListing: SponsorsListing
+
+  """
+  The sponsorship from the viewer to this user/organization; that is, the sponsorship where you're the sponsor.
+  """
+  sponsorshipForViewerAsSponsor(
+    """
+    Whether to return the sponsorship only if it's still active. Pass false to
+    get the viewer's sponsorship back even if it has been cancelled.
+    """
+    activeOnly: Boolean = true
+  ): Sponsorship
+
+  """
+  The sponsorship from this user/organization to the viewer; that is, the sponsorship you're receiving.
+  """
+  sponsorshipForViewerAsSponsorable(
+    """
+    Whether to return the sponsorship only if it's still active. Pass false to
+    get the sponsorship back even if it has been cancelled.
+    """
+    activeOnly: Boolean = true
+  ): Sponsorship
+
+  """
+  List of sponsorship updates sent from this sponsorable to sponsors.
+  """
+  sponsorshipNewsletters(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for sponsorship updates returned from the connection.
+    """
+    orderBy: SponsorshipNewsletterOrder = {field: CREATED_AT, direction: DESC}
+  ): SponsorshipNewsletterConnection!
+
+  """
+  The sponsorships where this user or organization is the maintainer receiving the funds.
+  """
+  sponsorshipsAsMaintainer(
+    """
+    Whether to include only sponsorships that are active right now, versus all
+    sponsorships this maintainer has ever received.
+    """
+    activeOnly: Boolean = true
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Whether or not to include private sponsorships in the result set
+    """
+    includePrivate: Boolean = false
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for sponsorships returned from this connection. If left
+    blank, the sponsorships will be ordered based on relevancy to the viewer.
+    """
+    orderBy: SponsorshipOrder
+  ): SponsorshipConnection!
+
+  """
+  The sponsorships where this user or organization is the funder.
+  """
+  sponsorshipsAsSponsor(
+    """
+    Whether to include only sponsorships that are active right now, versus all sponsorships this sponsor has ever made.
+    """
+    activeOnly: Boolean = true
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Filter sponsorships returned to those for the specified maintainers. That
+    is, the recipient of the sponsorship is a user or organization with one of
+    the given logins.
+    """
+    maintainerLogins: [String!]
+
+    """
+    Ordering options for sponsorships returned from this connection. If left
+    blank, the sponsorships will be ordered based on relevancy to the viewer.
+    """
+    orderBy: SponsorshipOrder
+  ): SponsorshipConnection!
+
+  """
+  Repositories the user has starred.
+  """
+  starredRepositories(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Order for connection
+    """
+    orderBy: StarOrder
+
+    """
+    Filters starred repositories to only return repositories owned by the viewer.
+    """
+    ownedByViewer: Boolean
+  ): StarredRepositoryConnection!
+
+  """
+  The user's description of what they're currently doing.
+  """
+  status: UserStatus
+
+  """
+  Repositories the user has contributed to, ordered by contribution rank, plus repositories the user has created
+  """
+  topRepositories(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for repositories returned from the connection
+    """
+    orderBy: RepositoryOrder!
+
+    """
+    How far back in time to fetch contributed repositories
+    """
+    since: DateTime
+  ): RepositoryConnection!
+
+  """
+  The amount in United States cents (e.g., 500 = $5.00 USD) that this entity has
+  spent on GitHub to fund sponsorships. Only returns a value when viewed by the
+  user themselves or by a user who can manage sponsorships for the requested organization.
+  """
+  totalSponsorshipAmountAsSponsorInCents(
+    """
+    Filter payments to those that occurred on or after this time.
+    """
+    since: DateTime
+
+    """
+    Filter payments to those made to the users or organizations with the specified usernames.
+    """
+    sponsorableLogins: [String!] = []
+
+    """
+    Filter payments to those that occurred before this time.
+    """
+    until: DateTime
+  ): Int
+
+  """
+  The user's Twitter username.
+  """
+  twitterUsername: String
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this user
+  """
+  url: URI!
+
+  """
+  Can the viewer pin repositories and gists to the profile?
+  """
+  viewerCanChangePinnedItems: Boolean!
+
+  """
+  Can the current viewer create new projects on this owner?
+  """
+  viewerCanCreateProjects: Boolean!
+
+  """
+  Whether or not the viewer is able to follow the user.
+  """
+  viewerCanFollow: Boolean!
+
+  """
+  Whether or not the viewer is able to sponsor this user/organization.
+  """
+  viewerCanSponsor: Boolean!
+
+  """
+  Whether or not this user is followed by the viewer. Inverse of isFollowingViewer.
+  """
+  viewerIsFollowing: Boolean!
+
+  """
+  True if the viewer is sponsoring this user/organization.
+  """
+  viewerIsSponsoring: Boolean!
+
+  """
+  A list of repositories the given user is watching.
+  """
+  watching(
+    """
+    Affiliation options for repositories returned from the connection. If none
+    are specified, the results will include repositories for which the current
+    viewer is an owner, collaborator, or member.
+    """
+    affiliations: [RepositoryAffiliation]
+
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    If non-null, filters repositories according to whether they have issues enabled
+    """
+    hasIssuesEnabled: Boolean
+
+    """
+    If non-null, filters repositories according to whether they have been locked
+    """
+    isLocked: Boolean
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for repositories returned from the connection
+    """
+    orderBy: RepositoryOrder
+
+    """
+    Array of owner's affiliation options for repositories returned from the
+    connection. For example, OWNER will include only repositories that the
+    organization or user being viewed owns.
+    """
+    ownerAffiliations: [RepositoryAffiliation] = [OWNER, COLLABORATOR]
+
+    """
+    If non-null, filters repositories according to privacy
+    """
+    privacy: RepositoryPrivacy
+  ): RepositoryConnection!
+
+  """
+  A URL pointing to the user's public website/blog.
+  """
+  websiteUrl: URI
+}
+
+"""
+The possible durations that a user can be blocked for.
+"""
+enum UserBlockDuration {
+  """
+  The user was blocked for 1 day
+  """
+  ONE_DAY
+
+  """
+  The user was blocked for 30 days
+  """
+  ONE_MONTH
+
+  """
+  The user was blocked for 7 days
+  """
+  ONE_WEEK
+
+  """
+  The user was blocked permanently
+  """
+  PERMANENT
+
+  """
+  The user was blocked for 3 days
+  """
+  THREE_DAYS
+}
+
+"""
+Represents a 'user_blocked' event on a given user.
+"""
+type UserBlockedEvent implements Node {
+  """
+  Identifies the actor who performed the event.
+  """
+  actor: Actor
+
+  """
+  The duration for which the user was blocked.
+  """
+  blockDuration: UserBlockDuration!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  The Node ID of the UserBlockedEvent object
+  """
+  id: ID!
+
+  """
+  The user who was blocked.
+  """
+  subject: User
+}
+
+"""
+The connection type for User.
+"""
+type UserConnection {
+  """
+  A list of edges.
+  """
+  edges: [UserEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [User]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edit on user content
+"""
+type UserContentEdit implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the date and time when the object was deleted.
+  """
+  deletedAt: DateTime
+
+  """
+  The actor who deleted this content
+  """
+  deletedBy: Actor
+
+  """
+  A summary of the changes for this edit
+  """
+  diff: String
+
+  """
+  When this content was edited
+  """
+  editedAt: DateTime!
+
+  """
+  The actor who edited this content
+  """
+  editor: Actor
+
+  """
+  The Node ID of the UserContentEdit object
+  """
+  id: ID!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+}
+
+"""
+A list of edits to content.
+"""
+type UserContentEditConnection {
+  """
+  A list of edges.
+  """
+  edges: [UserContentEditEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [UserContentEdit]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type UserContentEditEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: UserContentEdit
+}
+
+"""
+Represents a user.
+"""
+type UserEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: User
+}
+
+"""
+Email attributes from External Identity
+"""
+type UserEmailMetadata {
+  """
+  Boolean to identify primary emails
+  """
+  primary: Boolean
+
+  """
+  Type of email
+  """
+  type: String
+
+  """
+  Email id
+  """
+  value: String!
+}
+
+"""
+The user's description of what they're currently doing.
+"""
+type UserStatus implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  An emoji summarizing the user's status.
+  """
+  emoji: String
+
+  """
+  The status emoji as HTML.
+  """
+  emojiHTML: HTML
+
+  """
+  If set, the status will not be shown after this date.
+  """
+  expiresAt: DateTime
+
+  """
+  The Node ID of the UserStatus object
+  """
+  id: ID!
+
+  """
+  Whether this status indicates the user is not fully available on GitHub.
+  """
+  indicatesLimitedAvailability: Boolean!
+
+  """
+  A brief message describing what the user is doing.
+  """
+  message: String
+
+  """
+  The organization whose members can see this status. If null, this status is publicly visible.
+  """
+  organization: Organization
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The user who has this status.
+  """
+  user: User!
+}
+
+"""
+The connection type for UserStatus.
+"""
+type UserStatusConnection {
+  """
+  A list of edges.
+  """
+  edges: [UserStatusEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [UserStatus]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type UserStatusEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: UserStatus
+}
+
+"""
+Ordering options for user status connections.
+"""
+input UserStatusOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order user statuses by.
+  """
+  field: UserStatusOrderField!
+}
+
+"""
+Properties by which user status connections can be ordered.
+"""
+enum UserStatusOrderField {
+  """
+  Order user statuses by when they were updated.
+  """
+  UPDATED_AT
+}
+
+"""
+A domain that can be verified or approved for an organization or an enterprise.
+"""
+type VerifiableDomain implements Node {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The DNS host name that should be used for verification.
+  """
+  dnsHostName: URI
+
+  """
+  The unicode encoded domain.
+  """
+  domain: URI!
+
+  """
+  Whether a TXT record for verification with the expected host name was found.
+  """
+  hasFoundHostName: Boolean!
+
+  """
+  Whether a TXT record for verification with the expected verification token was found.
+  """
+  hasFoundVerificationToken: Boolean!
+
+  """
+  The Node ID of the VerifiableDomain object
+  """
+  id: ID!
+
+  """
+  Whether or not the domain is approved.
+  """
+  isApproved: Boolean!
+
+  """
+  Whether this domain is required to exist for an organization or enterprise policy to be enforced.
+  """
+  isRequiredForPolicyEnforcement: Boolean!
+
+  """
+  Whether or not the domain is verified.
+  """
+  isVerified: Boolean!
+
+  """
+  The owner of the domain.
+  """
+  owner: VerifiableDomainOwner!
+
+  """
+  The punycode encoded domain.
+  """
+  punycodeEncodedDomain: URI!
+
+  """
+  The time that the current verification token will expire.
+  """
+  tokenExpirationTime: DateTime
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The current verification token for the domain.
+  """
+  verificationToken: String
+}
+
+"""
+The connection type for VerifiableDomain.
+"""
+type VerifiableDomainConnection {
+  """
+  A list of edges.
+  """
+  edges: [VerifiableDomainEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [VerifiableDomain]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type VerifiableDomainEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: VerifiableDomain
+}
+
+"""
+Ordering options for verifiable domain connections.
+"""
+input VerifiableDomainOrder {
+  """
+  The ordering direction.
+  """
+  direction: OrderDirection!
+
+  """
+  The field to order verifiable domains by.
+  """
+  field: VerifiableDomainOrderField!
+}
+
+"""
+Properties by which verifiable domain connections can be ordered.
+"""
+enum VerifiableDomainOrderField {
+  """
+  Order verifiable domains by their creation date.
+  """
+  CREATED_AT
+
+  """
+  Order verifiable domains by the domain name.
+  """
+  DOMAIN
+}
+
+"""
+Types that can own a verifiable domain.
+"""
+union VerifiableDomainOwner = Enterprise | Organization
+
+"""
+Autogenerated input type of VerifyVerifiableDomain
+"""
+input VerifyVerifiableDomainInput {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The ID of the verifiable domain to verify.
+  """
+  id: ID! @possibleTypes(concreteTypes: ["VerifiableDomain"])
+}
+
+"""
+Autogenerated return type of VerifyVerifiableDomain
+"""
+type VerifyVerifiableDomainPayload {
+  """
+  A unique identifier for the client performing the mutation.
+  """
+  clientMutationId: String
+
+  """
+  The verifiable domain that was verified.
+  """
+  domain: VerifiableDomain
+}
+
+"""
+A hovercard context with a message describing how the viewer is related.
+"""
+type ViewerHovercardContext implements HovercardContext {
+  """
+  A string describing this context
+  """
+  message: String!
+
+  """
+  An octicon to accompany this context
+  """
+  octicon: String!
+
+  """
+  Identifies the user who is related to this context.
+  """
+  viewer: User!
+}
+
+"""
+A subject that may be upvoted.
+"""
+interface Votable {
+  """
+  Number of upvotes that this subject has received.
+  """
+  upvoteCount: Int!
+
+  """
+  Whether or not the current user can add or remove an upvote on this subject.
+  """
+  viewerCanUpvote: Boolean!
+
+  """
+  Whether or not the current user has already upvoted this subject.
+  """
+  viewerHasUpvoted: Boolean!
+}
+
+"""
+A workflow contains meta information about an Actions workflow file.
+"""
+type Workflow implements Node & UniformResourceLocatable {
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The Node ID of the Workflow object
+  """
+  id: ID!
+
+  """
+  The name of the workflow.
+  """
+  name: String!
+
+  """
+  The HTTP path for this workflow
+  """
+  resourcePath: URI!
+
+  """
+  The runs of the workflow.
+  """
+  runs(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+
+    """
+    Ordering options for the connection
+    """
+    orderBy: WorkflowRunOrder = {field: CREATED_AT, direction: DESC}
+  ): WorkflowRunConnection!
+
+  """
+  The state of the workflow.
+  """
+  state: WorkflowState!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this workflow
+  """
+  url: URI!
+}
+
+"""
+A workflow that must run for this rule to pass
+"""
+type WorkflowFileReference {
+  """
+  The path to the workflow file
+  """
+  path: String!
+
+  """
+  The ref (branch or tag) of the workflow file to use
+  """
+  ref: String
+
+  """
+  The ID of the repository where the workflow is defined
+  """
+  repositoryId: Int!
+
+  """
+  The commit SHA of the workflow file to use
+  """
+  sha: String
+}
+
+"""
+A workflow that must run for this rule to pass
+"""
+input WorkflowFileReferenceInput {
+  """
+  The path to the workflow file
+  """
+  path: String!
+
+  """
+  The ref (branch or tag) of the workflow file to use
+  """
+  ref: String
+
+  """
+  The ID of the repository where the workflow is defined
+  """
+  repositoryId: Int!
+
+  """
+  The commit SHA of the workflow file to use
+  """
+  sha: String
+}
+
+"""
+A workflow run.
+"""
+type WorkflowRun implements Node & UniformResourceLocatable {
+  """
+  The check suite this workflow run belongs to.
+  """
+  checkSuite: CheckSuite!
+
+  """
+  Identifies the date and time when the object was created.
+  """
+  createdAt: DateTime!
+
+  """
+  Identifies the primary key from the database.
+  """
+  databaseId: Int
+
+  """
+  The log of deployment reviews
+  """
+  deploymentReviews(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): DeploymentReviewConnection!
+
+  """
+  The event that triggered the workflow run
+  """
+  event: String!
+
+  """
+  The workflow file
+  """
+  file: WorkflowRunFile
+
+  """
+  The Node ID of the WorkflowRun object
+  """
+  id: ID!
+
+  """
+  The pending deployment requests of all check runs in this workflow run
+  """
+  pendingDeploymentRequests(
+    """
+    Returns the elements in the list that come after the specified cursor.
+    """
+    after: String
+
+    """
+    Returns the elements in the list that come before the specified cursor.
+    """
+    before: String
+
+    """
+    Returns the first _n_ elements from the list.
+    """
+    first: Int
+
+    """
+    Returns the last _n_ elements from the list.
+    """
+    last: Int
+  ): DeploymentRequestConnection!
+
+  """
+  The HTTP path for this workflow run
+  """
+  resourcePath: URI!
+
+  """
+  A number that uniquely identifies this workflow run in its parent workflow.
+  """
+  runNumber: Int!
+
+  """
+  Identifies the date and time when the object was last updated.
+  """
+  updatedAt: DateTime!
+
+  """
+  The HTTP URL for this workflow run
+  """
+  url: URI!
+
+  """
+  The workflow executed in this workflow run.
+  """
+  workflow: Workflow!
+}
+
+"""
+The connection type for WorkflowRun.
+"""
+type WorkflowRunConnection {
+  """
+  A list of edges.
+  """
+  edges: [WorkflowRunEdge]
+
+  """
+  A list of nodes.
+  """
+  nodes: [WorkflowRun]
+
+  """
+  Information to aid in pagination.
+  """
+  pageInfo: PageInfo!
+
+  """
+  Identifies the total count of items in the connection.
+  """
+  totalCount: Int!
+}
+
+"""
+An edge in a connection.
+"""
+type WorkflowRunEdge {
+  """
+  A cursor for use in pagination.
+  """
+  cursor: String!
+
+  """
+  The item at the end of the edge.
+  """
+  node: WorkflowRun
+}
+
+"""
+An executed workflow file for a workflow run.
+"""
+type WorkflowRunFile implements Node & UniformResourceLocatable {
+  """
+  The Node ID of the WorkflowRunFile object
+  """
+  id: ID!
+
+  """
+  The path of the workflow file relative to its repository.
+  """
+  path: String!
+
+  """
+  The direct link to the file in the repository which stores the workflow file.
+  """
+  repositoryFileUrl: URI!
+
+  """
+  The name and owner of the repository that stores the workflow file.
+  """
+  repositoryName: URI!
+
+  """
+  The HTTP path for this workflow run file
+  """
+  resourcePath: URI!
+
+  """
+  The parent workflow run execution for this file.
+  """
+  run: WorkflowRun!
+
+  """
+  The HTTP URL for this workflow run file
+  """
+  url: URI!
+
+  """
+  If the viewer has permissions to push to the repository which stores the workflow.
+  """
+  viewerCanPushRepository: Boolean!
+
+  """
+  If the viewer has permissions to read the repository which stores the workflow.
+  """
+  viewerCanReadRepository: Boolean!
+}
+
+"""
+Ways in which lists of workflow runs can be ordered upon return.
+"""
+input WorkflowRunOrder {
+  """
+  The direction in which to order workflow runs by the specified field.
+  """
+  direction: OrderDirection!
+
+  """
+  The field by which to order workflows.
+  """
+  field: WorkflowRunOrderField!
+}
+
+"""
+Properties by which workflow run connections can be ordered.
+"""
+enum WorkflowRunOrderField {
+  """
+  Order workflow runs by most recently created
+  """
+  CREATED_AT
+}
+
+"""
+The possible states for a workflow.
+"""
+enum WorkflowState {
+  """
+  The workflow is active.
+  """
+  ACTIVE
+
+  """
+  The workflow was deleted from the git repository.
+  """
+  DELETED
+
+  """
+  The workflow was disabled by default on a fork.
+  """
+  DISABLED_FORK
+
+  """
+  The workflow was disabled for inactivity in the repository.
+  """
+  DISABLED_INACTIVITY
+
+  """
+  The workflow was disabled manually.
+  """
+  DISABLED_MANUALLY
+}
+
+"""
+Require all changes made to a targeted branch to pass the specified workflows before they can be merged.
+"""
+type WorkflowsParameters {
+  """
+  Workflows that must pass for this rule to pass.
+  """
+  workflows: [WorkflowFileReference!]!
+}
+
+"""
+Require all changes made to a targeted branch to pass the specified workflows before they can be merged.
+"""
+input WorkflowsParametersInput {
+  """
+  Workflows that must pass for this rule to pass.
+  """
+  workflows: [WorkflowFileReferenceInput!]!
+}
+
+"""
+A valid x509 certificate string
+"""
+scalar X509Certificate
\ No newline at end of file
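
The tail of this fixture is dominated by the Relay connection pattern: every FooConnection pairs nullable edges/nodes lists with a mandatory pageInfo and totalCount, and every FooEdge pairs a cursor with a nullable node. A minimal sketch of the kind of pydantic output this shape implies, using UserStatus as the example; the class bodies are reduced stand-ins, and the real expected output is pinned by the test expectations, not by this sketch:

    from typing import Optional

    from pydantic import BaseModel


    class PageInfo(BaseModel):
        # Stand-in for the schema's PageInfo type (defined outside this hunk).
        hasNextPage: bool
        hasPreviousPage: bool
        startCursor: Optional[str] = None
        endCursor: Optional[str] = None


    class UserStatus(BaseModel):
        # Reduced stand-in for the full UserStatus type above.
        id: str
        message: Optional[str] = None


    class UserStatusEdge(BaseModel):
        # "An edge in a connection": a cursor plus the node it points at.
        cursor: str
        node: Optional[UserStatus] = None


    class UserStatusConnection(BaseModel):
        # Nullable edge/node lists paired with mandatory pagination metadata.
        edges: Optional[list[Optional[UserStatusEdge]]] = None
        nodes: Optional[list[Optional[UserStatus]]] = None
        pageInfo: PageInfo
        totalCount: int
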
diff -pruN 0.26.4-3/tests/data/graphql/simple-star-wars.graphql 0.45.0-1/tests/data/graphql/simple-star-wars.graphql
--- 0.26.4-3/tests/data/graphql/simple-star-wars.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/graphql/simple-star-wars.graphql	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,142 @@
+type Person {
+    id: ID!
+    name: String!
+    height: Int
+    mass: Int
+    hair_color: String
+    skin_color: String
+    eye_color: String
+    birth_year: String
+    gender: String
+
+    # Relationships
+    homeworld_id: ID
+    homeworld: Planet
+    species: [Species!]!
+    species_ids: [ID!]!
+    films: [Film!]!
+    films_ids: [ID!]!
+    starships: [Starship!]!
+    starships_ids: [ID!]!
+    vehicles: [Vehicle!]!
+    vehicles_ids: [ID!]!
+}
+
+type Planet {
+    id: ID!
+    name: String!
+    rotation_period: String
+    orbital_period: String
+    diameter: String
+    climate: String
+    gravity: String
+    terrain: String
+    surface_water: String
+    population: String
+
+    # Relationships
+    residents: [Person!]!
+    residents_ids: [ID!]!
+    films: [Film!]!
+    films_ids: [ID!]!
+}
+
+type Species {
+    id: ID!
+    name: String!
+    classification: String
+    designation: String
+    average_height: String
+    skin_colors: String
+    hair_colors: String
+    eye_colors: String
+    average_lifespan: String
+    language: String
+
+    # Relationships
+    people: [Person!]!
+    people_ids: [ID!]!
+    films: [Film!]!
+    films_ids: [ID!]!
+}
+
+type Vehicle {
+    id: ID!
+    name: String!
+    model: String
+    manufacturer: String
+    cost_in_credits: String
+    length: String
+    max_atmosphering_speed: String
+    crew: String
+    passengers: String
+    cargo_capacity: String
+    consumables: String
+    vehicle_class: String
+
+    # Relationships
+    pilots: [Person!]!
+    pilots_ids: [ID!]!
+    films: [Film!]!
+    films_ids: [ID!]!
+}
+
+type Starship {
+    id: ID!
+    name: String!
+    model: String
+    manufacturer: String
+    cost_in_credits: String
+    length: String
+    max_atmosphering_speed: String
+    crew: String
+    passengers: String
+    cargo_capacity: String
+    consumables: String
+    hyperdrive_rating: String
+    MGLT: String
+    starship_class: String
+
+    # Relationships
+    pilots: [Person!]!
+    pilots_ids: [ID!]!
+    films: [Film!]!
+    films_ids: [ID!]!
+}
+
+type Film {
+  id: ID!
+  title: String!
+  episode_id: Int!
+  opening_crawl: String!
+  director: String!
+  producer: String
+  release_date: String!
+
+  # Relationships
+  characters: [Person!]!
+  characters_ids: [ID!]!
+  planets: [Planet!]!
+  planets_ids: [ID!]!
+  starships: [Starship!]!
+  starships_ids: [ID!]!
+  vehicles: [Vehicle!]!
+  vehicles_ids: [ID!]!
+  species: [Species!]!
+  species_ids: [ID!]!
+}
+
+type Query {
+  planet(id: ID!): Planet
+  listPlanets(page: Int): [Planet!]!
+  person(id: ID!): Person
+  listPeople(page: Int): [Person!]!
+  species(id: ID!): Species
+  listSpecies(page: Int): [Species!]!
+  film(id: ID!): Film
+  listFilms(page: Int): [Film!]!
+  starship(id: ID!): Starship
+  listStarships(page: Int): [Starship!]!
+  vehicle(id: ID!): Vehicle
+  listVehicles(page: Int): [Vehicle!]!
+}
\ No newline at end of file
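
This Star Wars schema is a compact end-to-end GraphQL fixture. For reference, a sketch of driving the generator over it through the documented Python API; the output path and model flavour are illustrative choices, not what the test suite pins:

    from pathlib import Path

    from datamodel_code_generator import DataModelType, InputFileType, generate

    # Generate pydantic v2 models from the Star Wars schema fixture.
    generate(
        Path("tests/data/graphql/simple-star-wars.graphql"),
        input_file_type=InputFileType.GraphQL,
        output=Path("simple_star_wars_models.py"),  # illustrative output path
        output_model_type=DataModelType.PydanticV2BaseModel,
    )

Nullable scalars such as height: Int should come out as optional fields, while non-null list fields like films: [Film!]! stay required.
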
diff -pruN 0.26.4-3/tests/data/graphql/type_alias.graphql 0.45.0-1/tests/data/graphql/type_alias.graphql
--- 0.26.4-3/tests/data/graphql/type_alias.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/graphql/type_alias.graphql	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+scalar SimpleString
+
+type Person {
+  name: String!
+  age: Int!
+}
+
+type Pet {
+  name: String!
+  type: String!
+}
+
+union UnionType = Person | Pet
+
+type ModelWithTypeAliasField {
+  simple_field: SimpleString
+  union_field: UnionType
+  string_field: String
+}
\ No newline at end of file
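
As the filename suggests, this fixture exercises a custom scalar emitted as a type alias alongside a union. A hedged sketch of plausible output; the exact alias spellings are pinned by the test expectations:

    from typing import Optional, Union

    from pydantic import BaseModel

    # A custom scalar has no structure of its own, so a plain alias suffices.
    SimpleString = str


    class Person(BaseModel):
        name: str
        age: int


    class Pet(BaseModel):
        name: str
        type: str


    # union UnionType = Person | Pet
    UnionType = Union[Person, Pet]


    class ModelWithTypeAliasField(BaseModel):
        simple_field: Optional[SimpleString] = None
        union_field: Optional[UnionType] = None
        string_field: Optional[str] = None
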
diff -pruN 0.26.4-3/tests/data/graphql/union-aliased-bug.graphql 0.45.0-1/tests/data/graphql/union-aliased-bug.graphql
--- 0.26.4-3/tests/data/graphql/union-aliased-bug.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/graphql/union-aliased-bug.graphql	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+type GroupMetadata {
+  name: String!
+}
+
+type UserMetadata {
+  name: String!
+}
+
+union Metadata = UserMetadata | GroupMetadata
+
+type Resource {
+  metadata: UserMetadata!
+}
diff -pruN 0.26.4-3/tests/data/graphql/union-commented.graphql 0.45.0-1/tests/data/graphql/union-commented.graphql
--- 0.26.4-3/tests/data/graphql/union-commented.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/graphql/union-commented.graphql	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+"""This is a test comment in a single line"""
+type GroupMetadata {
+  name: String!
+}
+
+"""This is a multiline comment,
+with a line break,
+and a line break
+"""
+type UserMetadata {
+  name: String!
+}
+
+"""This is another multiline comment,
+with a line break,
+and another line break
+"""
+union Metadata = UserMetadata | GroupMetadata
+
+"""This is a single line comment"""
+union DummyMetadata = UserMetadata | GroupMetadata
+
+type Resource {
+  metadata: UserMetadata!
+}
diff -pruN 0.26.4-3/tests/data/graphql/union.graphql 0.45.0-1/tests/data/graphql/union.graphql
--- 0.26.4-3/tests/data/graphql/union.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/graphql/union.graphql	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,18 @@
+interface IResource {
+    id: ID!
+}
+
+type Employee implements IResource {
+    id: ID!
+    firstName: String
+    lastName: String
+}
+
+type Car implements IResource {
+    id: ID!
+    passengerCapacity: Int!
+}
+
+union Resource = Employee | Car
+
+union TechnicalResource = Car
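
Three union fixtures sit together here: union-aliased-bug.graphql checks that a field declared as a union member (metadata: UserMetadata!) is not widened to the Metadata union alias, union-commented.graphql covers descriptions attached to unions, and this file adds a one-armed union. A sketch of the expected mapping, with the single-member collapse explicitly an assumption:

    from typing import Optional, Union

    from pydantic import BaseModel


    class Employee(BaseModel):
        id: str
        firstName: Optional[str] = None
        lastName: Optional[str] = None


    class Car(BaseModel):
        id: str
        passengerCapacity: int


    # union Resource = Employee | Car
    Resource = Union[Employee, Car]

    # A one-armed union may collapse to its only member (an assumption;
    # the golden output pins the actual behaviour).
    TechnicalResource = Car
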
diff -pruN 0.26.4-3/tests/data/graphql/use-standard-collections.graphql 0.45.0-1/tests/data/graphql/use-standard-collections.graphql
--- 0.26.4-3/tests/data/graphql/use-standard-collections.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/graphql/use-standard-collections.graphql	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,6 @@
+type A {
+    field: String!
+    listField: [String!]!
+    listListField: [[String!]!]!
+}
+
diff -pruN 0.26.4-3/tests/data/graphql/use-union-operator.graphql 0.45.0-1/tests/data/graphql/use-union-operator.graphql
--- 0.26.4-3/tests/data/graphql/use-union-operator.graphql	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/graphql/use-union-operator.graphql	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+type A {
+    field: String!
+    optionalField: String
+    optionalListOptionalField: [String]
+    listOptionalField: [String]!
+    listField: [String!]!
+    optionalListListOptionalField: [[String]!]
+    listListOptionalField: [[String]!]!
+    listOptionalListOptionalField: [[String]]!
+    optionalListOptionalListField: [[String!]]
+    optionalListListField: [[String!]!]
+    listListField: [[String!]!]!
+    listOptionalListField: [[String!]]!
+}
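
This fixture enumerates every nesting of GraphQL list and non-null wrappers; combined with the flag its name references (--use-union-operator), the mapping to Python is mechanical. A sketch of a few representative fields, assuming Python 3.10+ for the X | None syntax:

    from pydantic import BaseModel


    class A(BaseModel):
        # GraphQL wrapper types map onto layered Python types:
        #   String!     -> str
        #   String      -> str | None
        #   [String]!   -> list[str | None]
        #   [String!]!  -> list[str]
        #   [[String]!] -> list[list[str | None]] | None
        field: str
        optionalField: str | None = None
        listOptionalField: list[str | None]
        listField: list[str]
        optionalListListOptionalField: list[list[str | None]] | None = None
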
diff -pruN 0.26.4-3/tests/data/json/array_include_null.json 0.45.0-1/tests/data/json/array_include_null.json
--- 0.26.4-3/tests/data/json/array_include_null.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/json/array_include_null.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+{
+  "items": [
+    {
+      "oofield": null
+    },
+    {
+      "oofield": [1, 2, 3]
+    }
+  ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/json/broken.json 0.45.0-1/tests/data/json/broken.json
--- 0.26.4-3/tests/data/json/broken.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/json/broken.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,2 @@
+{
+  "Pet": {
diff -pruN 0.26.4-3/tests/data/json/duplicate_models.json 0.45.0-1/tests/data/json/duplicate_models.json
--- 0.26.4-3/tests/data/json/duplicate_models.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/json/duplicate_models.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+{
+    "Arm Right": {
+        "Joint 1": 5,
+        "Joint 2": 3,
+        "Joint 3": 66
+    },
+    "Arm Left": {
+        "Joint 1": 55,
+        "Joint 2": 13,
+        "Joint 3": 6
+    },
+    "Head": {
+        "Joint 1": 10
+    }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/json/pet.json 0.45.0-1/tests/data/json/pet.json
--- 0.26.4-3/tests/data/json/pet.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/json/pet.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,6 @@
+{
+  "Pet": {
+    "name": "dog",
+    "age": 2
+  }
+}
\ No newline at end of file
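
Plain JSON documents like this one carry no schema; the generator first infers one (via the bundled genson dependency) and then generates models from the inferred schema. A sketch via the documented Python API; the output path is illustrative:

    from pathlib import Path

    from datamodel_code_generator import InputFileType, generate

    # JSON input is run through schema inference before model generation.
    generate(
        Path("tests/data/json/pet.json"),
        input_file_type=InputFileType.Json,
        output=Path("pet_models.py"),  # illustrative output path
    )
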
diff -pruN 0.26.4-3/tests/data/json/simple.json 0.45.0-1/tests/data/json/simple.json
--- 0.26.4-3/tests/data/json/simple.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/json/simple.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1 @@
+{"petName": "Lady"}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/json/snake_case.json 0.45.0-1/tests/data/json/snake_case.json
--- 0.26.4-3/tests/data/json/snake_case.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/json/snake_case.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,9 @@
+{
+    "snake_case": "Snake case",
+    "camelCase": "Camel case",
+    "kebab-case": "Kebab case",
+    "PascalCase": "Pascal case",
+    "UPPER_CASE": "Upper case",
+    "Dev_Info": "example 1",
+    "CLONE_Device": "example 2"
+}
\ No newline at end of file
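
This fixture mixes snake_case, camelCase, kebab-case, PascalCase, and UPPER_CASE keys. Under a hypothetical --snake-case-field run, keys whose spelling changes keep their original wire name through an alias; a sketch of the shape (exact field names are pinned by the golden output):

    from typing import Optional

    from pydantic import BaseModel, Field


    class Model(BaseModel):
        # Keys that are already valid snake_case need no alias; the rest
        # keep their original spelling via Field(alias=...).
        snake_case: Optional[str] = None
        camel_case: Optional[str] = Field(None, alias="camelCase")
        kebab_case: Optional[str] = Field(None, alias="kebab-case")
        pascal_case: Optional[str] = Field(None, alias="PascalCase")
        upper_case: Optional[str] = Field(None, alias="UPPER_CASE")
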
diff -pruN 0.26.4-3/tests/data/json/space_and_special_characters.json 0.45.0-1/tests/data/json/space_and_special_characters.json
--- 0.26.4-3/tests/data/json/space_and_special_characters.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/json/space_and_special_characters.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,26 @@
+{
+    "Serial Number": "A12345678",
+    "Timestamp": "2020-05-26T12:15:25.792741Z",
+    "Data": {
+        "Length (m)": 12.34,
+        "Symmetric deviation (%)": 12.216564148290807,
+        "Total running time (s)": 974,
+        "Mass (kg)": 42.23,
+        "Initial parameters": {
+            "V1": 123,
+            "V2": 456
+        },
+        "class": "Unknown"
+    },
+    "values": {
+        "1 Step": "42",
+        "2 Step": "23"
+    },
+    "recursive": {
+        "sub": {
+            "recursive": {
+                "value": 42.23
+            }
+        }
+    }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/alias_import_alias/a.schema.json 0.45.0-1/tests/data/jsonschema/alias_import_alias/a.schema.json
--- 0.26.4-3/tests/data/jsonschema/alias_import_alias/a.schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/alias_import_alias/a.schema.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "TypeA",
+  "type": "object",
+  "properties": {
+    "value": {
+      "type": "string"
+    }
+  },
+  "required": ["value"]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/alias_import_alias/b.schema.json 0.45.0-1/tests/data/jsonschema/alias_import_alias/b.schema.json
--- 0.26.4-3/tests/data/jsonschema/alias_import_alias/b.schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/alias_import_alias/b.schema.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Container",
+  "type": "object",
+  "properties": {
+    "a_schema": {
+      "$ref": "a.schema.json"
+    }
+  },
+  "required": ["a_schema"]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/alias_import_alias/date.schema.json 0.45.0-1/tests/data/jsonschema/alias_import_alias/date.schema.json
--- 0.26.4-3/tests/data/jsonschema/alias_import_alias/date.schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/alias_import_alias/date.schema.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "TypeDate",
+  "type": "object",
+  "properties": {
+    "date": { "type": ["string", "null"], "format": "date"},
+    "datetime": { "type": ["string", "null"], "format": "date-time"}
+  },
+  "required": ["date"]
+}
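
With multi-file JSON Schema input, each schema becomes its own module and a cross-file $ref becomes an import. The point of this alias_import_alias trio appears to be a name collision: Container's property a_schema shadows the natural import name for a.schema.json, forcing the generator to alias the import. A sketch of the module generated from b.schema.json, as it would sit inside the generated package; the alias spelling is a guess:

    from __future__ import annotations

    from pydantic import BaseModel

    # The property name "a_schema" would shadow the module import, so the
    # generator is expected to alias it (hypothetical spelling):
    from . import a_schema as a_schema_1


    class Container(BaseModel):
        a_schema: a_schema_1.TypeA
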
diff -pruN 0.26.4-3/tests/data/jsonschema/all_exports_multi_file/order.json 0.45.0-1/tests/data/jsonschema/all_exports_multi_file/order.json
--- 0.26.4-3/tests/data/jsonschema/all_exports_multi_file/order.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/all_exports_multi_file/order.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "title": "Order",
+  "properties": {
+    "id": {
+      "type": "string"
+    },
+    "user_id": {
+      "type": "string"
+    },
+    "product_id": {
+      "type": "string"
+    }
+  },
+  "required": ["id", "user_id", "product_id"]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/all_exports_multi_file/product.json 0.45.0-1/tests/data/jsonschema/all_exports_multi_file/product.json
--- 0.26.4-3/tests/data/jsonschema/all_exports_multi_file/product.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/all_exports_multi_file/product.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "title": "Product",
+  "properties": {
+    "id": {
+      "type": "string"
+    },
+    "price": {
+      "type": "number"
+    }
+  },
+  "required": ["id", "price"]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/all_exports_multi_file/user.json 0.45.0-1/tests/data/jsonschema/all_exports_multi_file/user.json
--- 0.26.4-3/tests/data/jsonschema/all_exports_multi_file/user.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/all_exports_multi_file/user.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "title": "User",
+  "properties": {
+    "id": {
+      "type": "string"
+    },
+    "name": {
+      "type": "string"
+    }
+  },
+  "required": ["id", "name"]
+}
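
For all_exports_multi_file, each of the three schemas should land in its own module, with the package re-exporting everything, as the fixture name hints. A sketch of a plausible generated __init__.py, assuming module names derived from the input file names:

    # __init__.py of the generated package (sketch): each model re-exported
    # so that `from models import Order, Product, User` works.
    from .order import Order
    from .product import Product
    from .user import User

    __all__ = ["Order", "Product", "User"]
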
diff -pruN 0.26.4-3/tests/data/jsonschema/all_of_any_of/direct.json 0.45.0-1/tests/data/jsonschema/all_of_any_of/direct.json
--- 0.26.4-3/tests/data/jsonschema/all_of_any_of/direct.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/all_of_any_of/direct.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,45 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "Target": {
+      "allOf": [
+        {
+          "anyOf": [
+            {
+              "type": "object",
+              "properties": {
+                "first": {
+                  "type": "string"
+                }
+              },
+              "required": [
+                "first"
+              ]
+            },
+            {
+              "type": "object",
+              "properties": {
+                "second": {
+                  "type": "string"
+                }
+              },
+              "required": [
+                "second"
+              ]
+            }
+          ]
+        },
+        {
+          "properties": {
+            "third": {
+              "type": "string"
+            }
+          },
+          "required": [
+            "third"
+          ]
+        }
+      ]
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/all_of_any_of/reference.json 0.45.0-1/tests/data/jsonschema/all_of_any_of/reference.json
--- 0.26.4-3/tests/data/jsonschema/all_of_any_of/reference.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/all_of_any_of/reference.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,49 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "First": {
+      "properties": {
+        "first": {
+          "type": "string"
+        }
+      },
+      "required": [
+        "first"
+      ]
+    },
+    "Second": {
+      "properties": {
+        "second": {
+          "type": "string"
+        }
+      },
+      "required": [
+        "second"
+      ]
+    },
+    "Target": {
+      "allOf": [
+        {
+          "anyOf": [
+            {
+              "$ref": "#/definitions/First"
+            },
+            {
+              "$ref": "#/definitions/Second"
+            }
+          ]
+        },
+        {
+          "properties": {
+            "third": {
+              "type": "string"
+            }
+          },
+          "required": [
+            "third"
+          ]
+        }
+      ]
+    }
+  }
+}
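
Both all_of_any_of variants (inline and by $ref) wrap an anyOf inside an allOf: Target must match one of First/Second and additionally require third. One plausible flattening, shown purely as a reading of the schema; the real generated shape is pinned by the test expectations:

    from typing import Union

    from pydantic import BaseModel


    class First(BaseModel):
        first: str


    class Second(BaseModel):
        second: str


    class Target1(First):
        # Each anyOf branch combined with the allOf member requiring "third".
        third: str


    class Target2(Second):
        third: str


    Target = Union[Target1, Target2]
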
diff -pruN 0.26.4-3/tests/data/jsonschema/all_of_any_of_base_class_ref.json 0.45.0-1/tests/data/jsonschema/all_of_any_of_base_class_ref.json
--- 0.26.4-3/tests/data/jsonschema/all_of_any_of_base_class_ref.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/all_of_any_of_base_class_ref.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,57 @@
+{
+  "$id": "https://example.com/schemas/MapState.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "MapState",
+  "allOf": [
+    {
+      "anyOf": [
+        {
+          "type": "object",
+          "properties": {
+            "latitude": {"type": "number", "minimum": -90, "maximum": 90},
+            "longitude": {"type": "number", "minimum": -180, "maximum": 180},
+            "zoom": {"type": "number", "minimum": 0, "maximum": 25, "default": 0},
+            "bearing": {"type": "number"},
+            "pitch": {"type": "number", "minimum": 0, "exclusiveMaximum": 90},
+            "dragRotate": {"type": "boolean"},
+            "mapSplitMode": {"type": "string", "const": "SINGLE_MAP"},
+            "isSplit": {"type": "boolean", "const": false, "default": false}
+          },
+          "required": ["latitude", "longitude", "pitch", "mapSplitMode"]
+        },
+        {
+          "type": "object",
+          "properties": {
+            "latitude": {"$ref": "#/allOf/0/anyOf/0/properties/latitude"},
+            "longitude": {"$ref": "#/allOf/0/anyOf/0/properties/longitude"},
+            "zoom": {"$ref": "#/allOf/0/anyOf/0/properties/zoom"},
+            "bearing": {"$ref": "#/allOf/0/anyOf/0/properties/bearing"},
+            "pitch": {"$ref": "#/allOf/0/anyOf/0/properties/pitch"},
+            "dragRotate": {"$ref": "#/allOf/0/anyOf/0/properties/dragRotate"},
+            "mapSplitMode": {"type": "string", "const": "SWIPE_COMPARE"},
+            "isSplit": {"type": "boolean", "const": true, "default": true}
+          },
+          "required": ["latitude", "longitude", "pitch", "mapSplitMode"]
+        }
+      ]
+    },
+    {
+      "anyOf": [
+        {
+          "type": "object",
+          "properties": {
+            "mapViewMode": {"type": "string", "const": "MODE_2D"}
+          },
+          "required": ["mapViewMode"]
+        },
+        {
+          "type": "object",
+          "properties": {
+            "mapViewMode": {"type": "string", "const": "MODE_3D"}
+          },
+          "required": ["mapViewMode"]
+        }
+      ]
+    }
+  ]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/all_of_default.json 0.45.0-1/tests/data/jsonschema/all_of_default.json
--- 0.26.4-3/tests/data/jsonschema/all_of_default.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/all_of_default.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,29 @@
+{
+    "type": "object",
+    "title": "Item",
+    "allOf": [{
+            "title": "Entity",
+            "type": "object"
+    }],
+    "required": [
+        "test",
+        "testarray"
+    ],
+    "properties": {
+        "test": {
+            "type": "string",
+            "default": "test123"
+        },
+        "testarray": {
+            "title": "test array",
+            "type": "array",
+            "items": {
+                "type": "string"
+            },
+            "minItems": 1,
+            "default": [
+                "test123"
+            ]
+        }
+    }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/all_of_merge_boolean_property.json 0.45.0-1/tests/data/jsonschema/all_of_merge_boolean_property.json
--- 0.26.4-3/tests/data/jsonschema/all_of_merge_boolean_property.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/all_of_merge_boolean_property.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+{
+  "$defs": {
+    "Base1": {
+      "type": "object",
+      "properties": {
+        "data": {
+          "type": "object",
+          "properties": {
+            "value": { "type": "string" }
+          }
+        }
+      }
+    },
+    "Base2": {
+      "type": "object",
+      "properties": {
+        "data": false
+      }
+    }
+  },
+  "allOf": [
+    { "$ref": "#/$defs/Base1" },
+    { "$ref": "#/$defs/Base2" }
+  ]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/all_of_merge_same_property.json 0.45.0-1/tests/data/jsonschema/all_of_merge_same_property.json
--- 0.26.4-3/tests/data/jsonschema/all_of_merge_same_property.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/all_of_merge_same_property.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,32 @@
+{
+  "$defs": {
+    "SelfLink": {
+      "type": "object",
+      "required": ["links"],
+      "properties": {
+        "links": {
+          "type": "object",
+          "properties": {
+            "self": { "type": "string" }
+          }
+        }
+      }
+    },
+    "CollectionLink": {
+      "type": "object",
+      "required": ["links"],
+      "properties": {
+        "links": {
+          "type": "object",
+          "properties": {
+            "collection": { "type": "string" }
+          }
+        }
+      }
+    }
+  },
+  "allOf": [
+    { "$ref": "#/$defs/SelfLink" },
+    { "$ref": "#/$defs/CollectionLink" }
+  ]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/all_of_one_of/direct.json 0.45.0-1/tests/data/jsonschema/all_of_one_of/direct.json
--- 0.26.4-3/tests/data/jsonschema/all_of_one_of/direct.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/all_of_one_of/direct.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,45 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "Target": {
+      "allOf": [
+        {
+          "oneOf": [
+            {
+              "type": "object",
+              "properties": {
+                "first": {
+                  "type": "string"
+                }
+              },
+              "required": [
+                "first"
+              ]
+            },
+            {
+              "type": "object",
+              "properties": {
+                "second": {
+                  "type": "string"
+                }
+              },
+              "required": [
+                "second"
+              ]
+            }
+          ]
+        },
+        {
+          "properties": {
+            "third": {
+              "type": "string"
+            }
+          },
+          "required": [
+            "third"
+          ]
+        }
+      ]
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/all_of_one_of/reference.json 0.45.0-1/tests/data/jsonschema/all_of_one_of/reference.json
--- 0.26.4-3/tests/data/jsonschema/all_of_one_of/reference.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/all_of_one_of/reference.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,49 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "First": {
+      "properties": {
+        "first": {
+          "type": "string"
+        }
+      },
+      "required": [
+        "first"
+      ]
+    },
+    "Second": {
+      "properties": {
+        "second": {
+          "type": "string"
+        }
+      },
+      "required": [
+        "second"
+      ]
+    },
+    "Target": {
+      "allOf": [
+        {
+          "oneOf": [
+            {
+              "$ref": "#/definitions/First"
+            },
+            {
+              "$ref": "#/definitions/Second"
+            }
+          ]
+        },
+        {
+          "properties": {
+            "third": {
+              "type": "string"
+            }
+          },
+          "required": [
+            "third"
+          ]
+        }
+      ]
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/all_of_ref/base_test.json 0.45.0-1/tests/data/jsonschema/all_of_ref/base_test.json
--- 0.26.4-3/tests/data/jsonschema/all_of_ref/base_test.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/all_of_ref/base_test.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,23 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "$id": "base_test.json",
+  "description": "base test",
+  "type": "object",
+  "definitions": {
+    "first": {
+      "type": "object",
+      "required": [
+        "second"
+      ],
+      "properties": {
+        "second": {
+          "type": "string",
+          "description": "Second",
+          "examples": [
+            "second"
+          ]
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/all_of_ref/test.json 0.45.0-1/tests/data/jsonschema/all_of_ref/test.json
--- 0.26.4-3/tests/data/jsonschema/all_of_ref/test.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/all_of_ref/test.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "$id": "test",
+  "description": "test",
+  "type": "object",
+  "allOf": [
+    {
+      "$ref": "base_test.json#/definitions/first"
+    }
+  ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/all_of_ref_self.json 0.45.0-1/tests/data/jsonschema/all_of_ref_self.json
--- 0.26.4-3/tests/data/jsonschema/all_of_ref_self.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/all_of_ref_self.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+{
+  "$schema": "https://json-schema.org/draft/2020-12/schema",
+  "$id": "https://example.com/all_of_ref_self.json",
+  "type": "object",
+  "properties": {
+    "version": {
+      "allOf": [
+        { "$ref": "#/$defs/version" }
+      ]
+    }
+  },
+  "$defs": {
+    "version": {
+      "type": "null"
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/all_of_ref_with_property_override.json 0.45.0-1/tests/data/jsonschema/all_of_ref_with_property_override.json
--- 0.26.4-3/tests/data/jsonschema/all_of_ref_with_property_override.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/all_of_ref_with_property_override.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+{
+  "$schema": "https://json-schema.org/draft/2020-12/schema",
+  "$id": "https://example.com/person.schema.json",
+  "title": "Person",
+  "$defs": {
+    "Thing": {
+      "type": "object",
+      "properties": {
+        "type": { "type": "string" },
+        "name": { "type": "string", "minLength": 1 }
+      },
+      "required": ["type", "name"]
+    }
+  },
+  "type": "object",
+  "allOf": [
+    { "$ref": "#/$defs/Thing" }
+  ],
+  "properties": {
+    "type": { "default": "playground:Person" },
+    "name": { "minLength": 2 },
+    "age": { "type": "integer" }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/all_of_with_object.json 0.45.0-1/tests/data/jsonschema/all_of_with_object.json
--- 0.26.4-3/tests/data/jsonschema/all_of_with_object.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/all_of_with_object.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,63 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Pet",
+  "allOf": [
+    {
+      "$ref": "#/definitions/Home"
+    },
+    {
+      "$ref": "#/definitions/Kind"
+    },
+    {
+      "$ref": "#/definitions/Id"
+    },
+    {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string"
+        }
+      }
+    }
+  ],
+  "type": [
+    "object"
+  ],
+  "properties": {
+    "name": {
+      "type": "string"
+    },
+    "age": {
+      "type": "integer"
+    }
+  },
+  "definitions": {
+    "Home": {
+      "type": "object",
+      "properties": {
+        "address": {
+          "type": "string"
+        },
+        "zip": {
+          "type": "string"
+        }
+      }
+    },
+    "Kind": {
+      "type": "object",
+      "properties": {
+        "description": {
+          "type": "string"
+        }
+      }
+    },
+    "Id": {
+      "type": "object",
+      "properties": {
+        "id": {
+          "type": "integer"
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/allof_enum_no_external_ref.json 0.45.0-1/tests/data/jsonschema/allof_enum_no_external_ref.json
--- 0.26.4-3/tests/data/jsonschema/allof_enum_no_external_ref.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/allof_enum_no_external_ref.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,18 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "QuantityTrunc": {
+      "type": "string",
+      "description": "Quantity truncation setting",
+      "allOf": [
+        {
+          "$ref": "#/definitions/MassUnit"
+        }
+      ]
+    },
+    "MassUnit": {
+      "type": "string",
+      "enum": ["g", "kg", "t"]
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/allof_enum_ref.json 0.45.0-1/tests/data/jsonschema/allof_enum_ref.json
--- 0.26.4-3/tests/data/jsonschema/allof_enum_ref.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/allof_enum_ref.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,26 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "CreateOrderByEstimateRequest": {
+      "type": "object",
+      "properties": {
+        "quantity_trunc": {
+          "$ref": "#/definitions/QuantityTrunc"
+        }
+      }
+    },
+    "QuantityTrunc": {
+      "type": "string",
+      "description": "Quantity truncation setting",
+      "allOf": [
+        {
+          "$ref": "#/definitions/MassUnit"
+        }
+      ]
+    },
+    "MassUnit": {
+      "type": "string",
+      "enum": ["g", "kg", "t"]
+    }
+  }
+}
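
allof_enum_no_external_ref.json and this file both wrap an enum in a single-element allOf; the interesting behaviour is that QuantityTrunc should reuse MassUnit rather than produce an empty subclass or a stray root model. A sketch under that assumption:

    from enum import Enum
    from typing import Optional

    from pydantic import BaseModel


    class MassUnit(Enum):
        g = "g"
        kg = "kg"
        t = "t"


    # The allOf wrapper adds no constraints, so the enum is reused directly.
    QuantityTrunc = MassUnit


    class CreateOrderByEstimateRequest(BaseModel):
        quantity_trunc: Optional[QuantityTrunc] = None
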
diff -pruN 0.26.4-3/tests/data/jsonschema/allof_root_model_constraints.json 0.45.0-1/tests/data/jsonschema/allof_root_model_constraints.json
--- 0.26.4-3/tests/data/jsonschema/allof_root_model_constraints.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/allof_root_model_constraints.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,210 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "StringDatatype": {
+      "description": "A base string type.",
+      "type": "string",
+      "pattern": "^\\S(.*\\S)?$"
+    },
+    "ConstrainedStringDatatype": {
+      "description": "A constrained string.",
+      "allOf": [
+        { "$ref": "#/definitions/StringDatatype" },
+        { "type": "string", "minLength": 1, "pattern": "^[A-Z].*" }
+      ]
+    },
+    "IntegerDatatype": {
+      "description": "A whole number.",
+      "type": "integer"
+    },
+    "NonNegativeIntegerDatatype": {
+      "description": "Non-negative integer.",
+      "allOf": [
+        { "$ref": "#/definitions/IntegerDatatype" },
+        { "minimum": 0 }
+      ]
+    },
+    "BoundedIntegerDatatype": {
+      "description": "Integer between 0 and 100.",
+      "allOf": [
+        { "$ref": "#/definitions/IntegerDatatype" },
+        { "minimum": 0, "maximum": 100 }
+      ]
+    },
+    "EmailDatatype": {
+      "description": "Email with format.",
+      "allOf": [
+        { "$ref": "#/definitions/StringDatatype" },
+        { "format": "email" }
+      ]
+    },
+    "FormattedStringDatatype": {
+      "description": "A string with email format.",
+      "type": "string",
+      "format": "email"
+    },
+    "ObjectBase": {
+      "type": "object",
+      "properties": {
+        "id": { "type": "integer" }
+      }
+    },
+    "ObjectWithAllOf": {
+      "description": "Object inheritance - not a root model.",
+      "allOf": [
+        { "$ref": "#/definitions/ObjectBase" },
+        { "type": "object", "properties": { "name": { "type": "string" } } }
+      ]
+    },
+    "MultiRefAllOf": {
+      "description": "Multiple refs - not handled by new code.",
+      "allOf": [
+        { "$ref": "#/definitions/StringDatatype" },
+        { "$ref": "#/definitions/IntegerDatatype" }
+      ]
+    },
+    "NoConstraintAllOf": {
+      "description": "No constraints added.",
+      "allOf": [
+        { "$ref": "#/definitions/StringDatatype" }
+      ]
+    },
+    "IncompatibleTypeAllOf": {
+      "description": "Incompatible types.",
+      "allOf": [
+        { "$ref": "#/definitions/StringDatatype" },
+        { "type": "boolean" }
+      ]
+    },
+    "ConstraintWithProperties": {
+      "description": "Constraint item has properties.",
+      "allOf": [
+        { "$ref": "#/definitions/StringDatatype" },
+        { "properties": { "extra": { "type": "string" } } }
+      ]
+    },
+    "ConstraintWithItems": {
+      "description": "Constraint item has items.",
+      "allOf": [
+        { "$ref": "#/definitions/StringDatatype" },
+        { "items": { "type": "string" } }
+      ]
+    },
+    "NumberIntegerCompatible": {
+      "description": "Number and integer are compatible.",
+      "allOf": [
+        { "$ref": "#/definitions/IntegerDatatype" },
+        { "type": "number", "minimum": 0 }
+      ]
+    },
+    "RefWithSchemaKeywords": {
+      "description": "Ref with additional schema keywords.",
+      "allOf": [
+        { "$ref": "#/definitions/StringDatatype", "minLength": 5 },
+        { "maxLength": 100 }
+      ]
+    },
+    "ArrayDatatype": {
+      "type": "array",
+      "items": { "type": "string" }
+    },
+    "RefToArrayAllOf": {
+      "description": "Ref to array - not a root model.",
+      "allOf": [
+        { "$ref": "#/definitions/ArrayDatatype" },
+        { "minItems": 1 }
+      ]
+    },
+    "ObjectNoPropsDatatype": {
+      "type": "object"
+    },
+    "RefToObjectNoPropsAllOf": {
+      "description": "Ref to object without properties - not a root model.",
+      "allOf": [
+        { "$ref": "#/definitions/ObjectNoPropsDatatype" },
+        { "minProperties": 1 }
+      ]
+    },
+    "PatternPropsDatatype": {
+      "patternProperties": {
+        "^S_": { "type": "string" }
+      }
+    },
+    "RefToPatternPropsAllOf": {
+      "description": "Ref to patternProperties - not a root model.",
+      "allOf": [
+        { "$ref": "#/definitions/PatternPropsDatatype" },
+        { "minProperties": 1 }
+      ]
+    },
+    "NestedAllOfDatatype": {
+      "allOf": [
+        { "type": "string" },
+        { "minLength": 1 }
+      ]
+    },
+    "RefToNestedAllOfAllOf": {
+      "description": "Ref to nested allOf - not a root model.",
+      "allOf": [
+        { "$ref": "#/definitions/NestedAllOfDatatype" },
+        { "maxLength": 100 }
+      ]
+    },
+    "ConstraintsOnlyDatatype": {
+      "description": "Constraints only, no type.",
+      "minLength": 1,
+      "pattern": "^[A-Z]"
+    },
+    "RefToConstraintsOnlyAllOf": {
+      "description": "Ref to constraints-only schema.",
+      "allOf": [
+        { "$ref": "#/definitions/ConstraintsOnlyDatatype" },
+        { "maxLength": 100 }
+      ]
+    },
+    "NoDescriptionAllOf": {
+      "allOf": [
+        { "$ref": "#/definitions/StringDatatype" },
+        { "minLength": 5 }
+      ]
+    },
+    "EmptyConstraintItemAllOf": {
+      "description": "AllOf with empty constraint item.",
+      "allOf": [
+        { "$ref": "#/definitions/StringDatatype" },
+        {},
+        { "maxLength": 50 }
+      ]
+    },
+    "ConflictingFormatAllOf": {
+      "description": "Conflicting formats - falls back to existing behavior.",
+      "allOf": [
+        { "$ref": "#/definitions/FormattedStringDatatype" },
+        { "format": "date-time" }
+      ]
+    }
+  },
+  "type": "object",
+  "properties": {
+    "name": { "$ref": "#/definitions/ConstrainedStringDatatype" },
+    "count": { "$ref": "#/definitions/NonNegativeIntegerDatatype" },
+    "percentage": { "$ref": "#/definitions/BoundedIntegerDatatype" },
+    "email": { "$ref": "#/definitions/EmailDatatype" },
+    "obj": { "$ref": "#/definitions/ObjectWithAllOf" },
+    "multi": { "$ref": "#/definitions/MultiRefAllOf" },
+    "noconstraint": { "$ref": "#/definitions/NoConstraintAllOf" },
+    "incompatible": { "$ref": "#/definitions/IncompatibleTypeAllOf" },
+    "withprops": { "$ref": "#/definitions/ConstraintWithProperties" },
+    "withitems": { "$ref": "#/definitions/ConstraintWithItems" },
+    "numint": { "$ref": "#/definitions/NumberIntegerCompatible" },
+    "refwithkw": { "$ref": "#/definitions/RefWithSchemaKeywords" },
+    "refarr": { "$ref": "#/definitions/RefToArrayAllOf" },
+    "refobjnoprops": { "$ref": "#/definitions/RefToObjectNoPropsAllOf" },
+    "refpatternprops": { "$ref": "#/definitions/RefToPatternPropsAllOf" },
+    "refnestedallof": { "$ref": "#/definitions/RefToNestedAllOfAllOf" },
+    "refconstraintsonly": { "$ref": "#/definitions/RefToConstraintsOnlyAllOf" },
+    "nodescription": { "$ref": "#/definitions/NoDescriptionAllOf" },
+    "emptyconstraint": { "$ref": "#/definitions/EmptyConstraintItemAllOf" },
+    "conflictingformat": { "$ref": "#/definitions/ConflictingFormatAllOf" }
+  }
+}
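
This fixture enumerates which allOf shapes may be collapsed into a constrained root model (a scalar ref plus bare constraints) and which must not (objects, arrays, multiple refs, incompatible types, and so on). For the happy path, a sketch of the kind of pydantic v2 root models one might expect; how conflicting constraints such as the two patterns merge is exactly what the golden output pins, so only the general shape is shown:

    from typing import Annotated

    from pydantic import Field, RootModel, StringConstraints


    class ConstrainedStringDatatype(
        RootModel[Annotated[str, StringConstraints(min_length=1, pattern=r"^[A-Z].*")]]
    ):
        """A constrained string: the referenced base type with the extra
        allOf constraints folded in (the merge policy is an assumption)."""


    class NonNegativeIntegerDatatype(RootModel[Annotated[int, Field(ge=0)]]):
        """Non-negative integer."""


    class BoundedIntegerDatatype(RootModel[Annotated[int, Field(ge=0, le=100)]]):
        """Integer between 0 and 100."""
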
diff -pruN 0.26.4-3/tests/data/jsonschema/anyof_const_enum_nested.yaml 0.45.0-1/tests/data/jsonschema/anyof_const_enum_nested.yaml
--- 0.26.4-3/tests/data/jsonschema/anyof_const_enum_nested.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/anyof_const_enum_nested.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+$schema: http://json-schema.org/draft-07/schema#
+type: object
+title: Config
+properties:
+  mode:
+    title: Mode
+    type: string
+    anyOf:
+      - title: fast
+        const: fast
+      - title: slow
+        const: slow
+  modes:
+    type: array
+    items:
+      type: string
+      anyOf:
+        - const: a
+        - const: b
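This YAML fixture nests `anyOf` branches that each carry only a `const` under an already-typed property. One plausible rendering, assuming the generator folds the const branches into `Literal` unions:

    from typing import List, Literal, Optional

    from pydantic import BaseModel


    class Config(BaseModel):
        # each anyOf-of-const collapses to a Literal union
        mode: Optional[Literal["fast", "slow"]] = None
        modes: Optional[List[Literal["a", "b"]]] = None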
diff -pruN 0.26.4-3/tests/data/jsonschema/array_field_constraints.json 0.45.0-1/tests/data/jsonschema/array_field_constraints.json
--- 0.26.4-3/tests/data/jsonschema/array_field_constraints.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/array_field_constraints.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,26 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "title": "TestSchema",
+  "description": "For the test",
+  "properties": {
+    "numbers": {
+      "type": "array",
+      "description": "A list of numbers",
+      "items": {
+        "type": "string",
+        "pattern": "^\\d{1,15}$",
+        "description": "Just a number",
+        "examples": [
+          "1",
+          "5464446",
+          "684572369854259"
+        ]
+      }
+    }
+  },
+  "required": [
+    "numbers"
+  ]
+}
+
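Here the constraint sits on the array's item schema rather than on the array itself. A sketch of how that plausibly surfaces in pydantic v2 output (v1 would spell the keyword `regex=` instead of `pattern=`):

    from typing import Annotated, List

    from pydantic import BaseModel, Field


    class TestSchema(BaseModel):
        # the per-item pattern is lifted onto the item type, not the list
        numbers: List[Annotated[str, Field(pattern=r"^\d{1,15}$")]] = Field(
            ..., description="A list of numbers"
        )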
diff -pruN 0.26.4-3/tests/data/jsonschema/array_in_additional_properties.json 0.45.0-1/tests/data/jsonschema/array_in_additional_properties.json
--- 0.26.4-3/tests/data/jsonschema/array_in_additional_properties.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/array_in_additional_properties.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "$id": "https://example.com/person.schema.json",
+  "$schema": "https://json-schema.org/draft/2020-12/schema",
+  "title": "my JSON of list of string",
+  "type": "object",
+  "additionalProperties": {
+    "type": "array",
+    "items": [
+      {
+        "type": "string"
+      }
+    ]
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/boolean_property.json 0.45.0-1/tests/data/jsonschema/boolean_property.json
--- 0.26.4-3/tests/data/jsonschema/boolean_property.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/boolean_property.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,6 @@
+{
+  "type": "object",
+  "properties": {
+    "field": true
+  }
+}
\ No newline at end of file
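A bare boolean schema (`"field": true`) permits any JSON value, so the natural target type is `Any`; a minimal sketch, with the class name assumed:

    from typing import Any, Optional

    from pydantic import BaseModel


    class Model(BaseModel):
        # the `true` schema imposes no constraints at all
        field: Optional[Any] = None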
diff -pruN 0.26.4-3/tests/data/jsonschema/bundled_schema_with_id.json 0.45.0-1/tests/data/jsonschema/bundled_schema_with_id.json
--- 0.26.4-3/tests/data/jsonschema/bundled_schema_with_id.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/bundled_schema_with_id.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,45 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "$id": "https://example.org/schemas/bundled.json",
+  "title": "Bundled Schema",
+  "description": "A bundled schema with $id that differs from fetch URL (Issue #1798)",
+  "type": "object",
+  "properties": {
+    "user": {
+      "$ref": "#/definitions/user"
+    },
+    "pet": {
+      "$ref": "#/definitions/pet"
+    }
+  },
+  "definitions": {
+    "user": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string"
+        },
+        "pet": {
+          "$ref": "#/definitions/pet"
+        }
+      },
+      "required": ["name"]
+    },
+    "pet": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string"
+        },
+        "species": {
+          "$ref": "#/definitions/species"
+        }
+      },
+      "required": ["name", "species"]
+    },
+    "species": {
+      "type": "string",
+      "enum": ["dog", "cat", "bird"]
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/circular_reference.json 0.45.0-1/tests/data/jsonschema/circular_reference.json
--- 0.26.4-3/tests/data/jsonschema/circular_reference.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/circular_reference.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,49 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Member",
+  "$ref": "#/definitions/user",
+  "definitions": {
+    "user": {
+      "title": "User",
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string"
+        },
+        "pet": {
+          "$ref": "#/definitions/animal"
+        },
+        "home": {
+          "$ref": "#/definitions/house"
+        }
+      }
+    },
+    "animal": {
+      "title": "Animal",
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string"
+        },
+        "breeder": {
+          "$ref": "#/definitions/user"
+        },
+        "home": {
+          "$ref": "#/definitions/house"
+        }
+      }
+    },
+    "house": {
+      "title": "House",
+      "type": "object",
+      "properties": {
+        "address": {
+          "type": "string"
+        },
+        "owner": {
+          "$ref": "#/definitions/user"
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
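The three definitions reference each other in a cycle (user → animal → user, plus house on both sides), so any generated module needs forward references. A sketch under the assumption that postponed annotation evaluation is used, which lets the classes appear in any order (the `Member` root alias is omitted for brevity):

    from __future__ import annotations

    from typing import Optional

    from pydantic import BaseModel


    class User(BaseModel):
        name: Optional[str] = None
        pet: Optional[Animal] = None  # forward reference, defined below
        home: Optional[House] = None


    class Animal(BaseModel):
        name: Optional[str] = None
        breeder: Optional[User] = None
        home: Optional[House] = None


    class House(BaseModel):
        address: Optional[str] = None
        owner: Optional[User] = None

pydantic v2 resolves the string annotations lazily, so the cycle validates once all three classes exist in the module.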
diff -pruN 0.26.4-3/tests/data/jsonschema/collapse_root_models_empty_union.json 0.45.0-1/tests/data/jsonschema/collapse_root_models_empty_union.json
--- 0.26.4-3/tests/data/jsonschema/collapse_root_models_empty_union.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/collapse_root_models_empty_union.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+{
+  "$schema": "https://json-schema.org/draft/2020-12/schema",
+  "type": "object",
+  "properties": {
+    "field": {
+      "anyOf": [
+        {"$ref": "#/$defs/NullType1"},
+        {"$ref": "#/$defs/NullType2"}
+      ]
+    }
+  },
+  "$defs": {
+    "NullType1": {
+      "type": "null"
+    },
+    "NullType2": {
+      "type": "null"
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/collapse_root_models_nested_reference.json 0.45.0-1/tests/data/jsonschema/collapse_root_models_nested_reference.json
--- 0.26.4-3/tests/data/jsonschema/collapse_root_models_nested_reference.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/collapse_root_models_nested_reference.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,31 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "properties": {
+    "refs": {
+      "$ref": "#/$defs/SomeRefs"
+    }
+  },
+  "required": [
+    "refs"
+  ],
+  "$defs": {
+    "SomeRefs": {
+      "type": "array",
+      "items": {
+        "$ref": "#/$defs/SomeRef"
+      }
+    },
+    "SomeRef": {
+      "type": "object",
+      "properties": {
+        "id": {
+          "type": "string"
+        }
+      },
+      "required": [
+        "id"
+      ]
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/collapse_root_models_with_optional.json 0.45.0-1/tests/data/jsonschema/collapse_root_models_with_optional.json
--- 0.26.4-3/tests/data/jsonschema/collapse_root_models_with_optional.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/collapse_root_models_with_optional.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+{
+  "$schema": "https://json-schema.org/draft/2020-12/schema",
+  "type": "object",
+  "properties": {
+    "field": {
+      "anyOf": [
+        {"$ref": "#/$defs/StringType"},
+        {"$ref": "#/$defs/NullType"}
+      ]
+    }
+  },
+  "$defs": {
+    "StringType": {
+      "type": "string"
+    },
+    "NullType": {
+      "type": "null"
+    }
+  }
+}
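This fixture targets the `--collapse-root-models` option: both `anyOf` branches are refs to trivial root models. With that option the wrappers are plausibly inlined, reducing the union to a plain optional string:

    from typing import Optional

    from pydantic import BaseModel


    class Model(BaseModel):
        # StringType/NullType root models inlined by --collapse-root-models
        field: Optional[str] = None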
diff -pruN 0.26.4-3/tests/data/jsonschema/combine_any_of_object.json 0.45.0-1/tests/data/jsonschema/combine_any_of_object.json
--- 0.26.4-3/tests/data/jsonschema/combine_any_of_object.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/combine_any_of_object.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,42 @@
+{
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "type": "object",
+    "title": "My schema",
+    "additionalProperties": true,
+    "properties": {
+        "AddressLine1": { "type": "string" },
+        "AddressLine2": { "type": "string" },
+        "City":         { "type": "string" }
+    },
+    "required": [ "AddressLine1" ],
+    "anyOf": [
+        {
+            "type": "object",
+            "properties": {
+                "State":   { "type": "string" },
+                "ZipCode": { "type": "string" }
+            },
+            "required": [ "ZipCode" ]
+        },
+        {
+            "type": "object",
+            "properties": {
+                "County":   { "type": "string" },
+                "PostCode": { "type": "string" }
+            },
+            "required": [ "PostCode" ]
+        },
+        { "$ref": "#/definitions/US" }
+    ],
+    "definitions": {
+        "US":  {
+            "type": "object",
+            "properties": {
+                "County":   { "type": "string" },
+                "PostCode": { "type": "string" }
+            },
+            "required": [ "PostCode" ]
+        }
+    }
+}
+
diff -pruN 0.26.4-3/tests/data/jsonschema/combine_one_of_object.json 0.45.0-1/tests/data/jsonschema/combine_one_of_object.json
--- 0.26.4-3/tests/data/jsonschema/combine_one_of_object.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/combine_one_of_object.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,42 @@
+{
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "type": "object",
+    "title": "My schema",
+    "additionalProperties": true,
+    "properties": {
+        "AddressLine1": { "type": "string" },
+        "AddressLine2": { "type": "string" },
+        "City":         { "type": "string" }
+    },
+    "required": [ "AddressLine1" ],
+    "oneOf": [
+        {
+            "type": "object",
+            "properties": {
+                "State":   { "type": "string" },
+                "ZipCode": { "type": "string" }
+            },
+            "required": [ "ZipCode" ]
+        },
+        {
+            "type": "object",
+            "properties": {
+                "County":   { "type": "string" },
+                "PostCode": { "type": "string" }
+            },
+            "required": [ "PostCode" ]
+        },
+        { "$ref": "#/definitions/US" }
+    ],
+    "definitions": {
+        "US":  {
+            "type": "object",
+            "properties": {
+                "County":   { "type": "string" },
+                "PostCode": { "type": "string" }
+            },
+            "required": [ "PostCode" ]
+        }
+    }
+}
+
diff -pruN 0.26.4-3/tests/data/jsonschema/combined_array.json 0.45.0-1/tests/data/jsonschema/combined_array.json
--- 0.26.4-3/tests/data/jsonschema/combined_array.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/combined_array.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,209 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Pet",
+  "type": [
+    "object",
+    "array"
+  ],
+  "items": {
+    "$ref": "#"
+  },
+  "properties": {
+    "name": {
+      "type": "string"
+    },
+    "age": {
+      "type": "integer"
+    }
+  },
+  "definitions": {
+    "CombinedEnum": {
+      "type": [
+        "array",
+        "string"
+      ],
+      "items": {
+        "$ref": "#/definitions/Kind"
+      },
+      "enum": [
+        "green",
+        "red"
+      ]
+    },
+    "CombinedAllOf": {
+      "type": [
+        "array"
+      ],
+      "items": {
+        "$ref": "#/definitions/Kind"
+      },
+      "allOf": [
+        {
+          "$ref": "#/definitions/Kind"
+        },
+        {
+          "$ref": "#/definitions/Id"
+        },
+        {
+          "$ref": "#/definitions/CustomRootModel"
+        }
+      ]
+    },
+    "CombinedObjectField": {
+      "type": "object",
+      "properties": {
+        "CombinedEnumField": {
+          "type": [
+            "array",
+            "string"
+          ],
+          "items": {
+            "$ref": "#/definitions/Kind"
+          },
+          "enum": [
+            "green",
+            "red"
+          ]
+        },
+        "CombinedAllOfField": {
+          "type": [
+            "array"
+          ],
+          "items": {
+            "$ref": "#/definitions/Kind"
+          },
+          "allOf": [
+            {
+              "$ref": "#/definitions/Kind"
+            },
+            {
+              "$ref": "#/definitions/Id"
+            },
+            {
+              "$ref": "#/definitions/CustomRootModel"
+            }
+          ]
+        },
+        "CombinedObjectField": {
+          "type": [
+            "array",
+            "object"
+          ],
+          "items": {
+            "$ref": "#/definitions/Kind"
+          },
+          "properties": {
+            "color": {
+              "type": "string"
+            }
+          }
+        },
+        "CombinedAllOfObjectField": {
+          "type": [
+            "array",
+            "object"
+          ],
+          "properties": {
+            "color": {
+              "type": "string"
+            }
+          },
+          "items": {
+            "$ref": "#/definitions/Kind"
+          },
+          "allOf": [
+            {
+              "$ref": "#/definitions/Kind"
+            },
+            {
+              "$ref": "#/definitions/Id"
+            },
+            {
+              "$ref": "#/definitions/CustomRootModel"
+            }
+          ]
+        }
+      }
+    },
+    "CombinedSelf": {
+      "type": [
+        "array",
+        "object"
+      ],
+      "items": {
+        "$ref": "#/definitions/CombinedSelf"
+      },
+      "properties": {
+        "color": {
+          "type": "string"
+        }
+      }
+    },
+    "CombinedSelfEnum": {
+      "type": [
+        "array",
+        "object"
+      ],
+      "items": {
+        "$ref": "#/definitions/CombinedSelfEnum"
+      },
+      "properties": {
+        "color": {
+          "type": "string"
+        }
+      },
+      "enum": [
+        "green",
+        "red"
+      ]
+    },
+    "CombinedSelfAllOf": {
+      "type": [
+        "array",
+        "object"
+      ],
+      "items": {
+        "$ref": "#/definitions/CombinedSelfAllOf"
+      },
+      "properties": {
+        "color": {
+          "type": "string"
+        }
+      },
+      "enum": [
+        "green",
+        "red"
+      ],
+      "allOf": [
+        {
+          "$ref": "#/definitions/Kind"
+        },
+        {
+          "$ref": "#/definitions/Id"
+        },
+        {
+          "$ref": "#/definitions/CustomRootModel"
+        }
+      ]
+    },
+    "Kind": {
+      "type": "object",
+      "properties": {
+        "description": {
+          "type": "string"
+        }
+      }
+    },
+    "Id": {
+      "type": "object",
+      "properties": {
+        "id": {
+          "type": "integer"
+        }
+      }
+    },
+    "CustomRootModel": {
+      "type": "string"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/complex_any_of.json 0.45.0-1/tests/data/jsonschema/complex_any_of.json
--- 0.26.4-3/tests/data/jsonschema/complex_any_of.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/complex_any_of.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,46 @@
+{
+  "$schema": "http://json-schema.org/draft/2019-09/schema#",
+  "anyOf": [
+    {
+      "type": "integer",
+      "$comment": "int"
+    },
+    {
+      "type": "array",
+      "items": {
+        "type": "object",
+        "properties": {
+          "key": {
+            "type": "object",
+            "properties": {
+              "address": {
+                "type": "string",
+                "$comment": "address"
+              },
+              "nat": {
+                "type": "string",
+                "$comment": "nat"
+              }
+            },
+            "required": [
+              "address",
+              "nat"
+            ],
+            "additionalProperties": false,
+            "$comment": "pair"
+          },
+          "value": {
+            "type": "string",
+            "$comment": "nat"
+          }
+        },
+        "required": [
+          "key",
+          "value"
+        ],
+        "additionalProperties": false
+      }
+    }
+  ],
+  "$comment": "big_map"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/complex_one_of.json 0.45.0-1/tests/data/jsonschema/complex_one_of.json
--- 0.26.4-3/tests/data/jsonschema/complex_one_of.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/complex_one_of.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,46 @@
+{
+  "$schema": "http://json-schema.org/draft/2019-09/schema#",
+  "oneOf": [
+    {
+      "type": "integer",
+      "$comment": "int"
+    },
+    {
+      "type": "array",
+      "items": {
+        "type": "object",
+        "properties": {
+          "key": {
+            "type": "object",
+            "properties": {
+              "address": {
+                "type": "string",
+                "$comment": "address"
+              },
+              "nat": {
+                "type": "string",
+                "$comment": "nat"
+              }
+            },
+            "required": [
+              "address",
+              "nat"
+            ],
+            "additionalProperties": false,
+            "$comment": "pair"
+          },
+          "value": {
+            "type": "string",
+            "$comment": "nat"
+          }
+        },
+        "required": [
+          "key",
+          "value"
+        ],
+        "additionalProperties": false
+      }
+    }
+  ],
+  "$comment": "big_map"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/complicated_enum.json 0.45.0-1/tests/data/jsonschema/complicated_enum.json
--- 0.26.4-3/tests/data/jsonschema/complicated_enum.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/complicated_enum.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,39 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "ProcessingStatus": {
+      "title": "ProcessingStatus",
+      "enum": [
+        "COMPLETED",
+        "PENDING",
+        "FAILED"
+      ],
+      "type": "string",
+      "description": "The processing status"
+    },
+    "kind": {"type": "string"}
+  },
+  "title": "ProcessingTask",
+  "type": "object",
+  "properties": {
+    "processing_status_union": {
+      "allOf": [
+        { "type": "string"},
+        {
+          "$ref": "#/definitions/ProcessingStatus"
+        }
+      ],
+      "default": "COMPLETED"
+    },
+    "processing_status": {
+      "$ref": "#/definitions/ProcessingStatus",
+      "default": "COMPLETED"
+    },
+    "name": {
+      "type": "string"
+    },
+    "kind": {
+      "$ref": "#/definitions/kind"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/const.json 0.45.0-1/tests/data/jsonschema/const.json
--- 0.26.4-3/tests/data/jsonschema/const.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/const.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "title": "Const",
+  "properties": {
+    "foo": {
+      "const": "foo"
+    }
+  },
+  "required": ["foo"]
+}
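A required `const` is narrower than a one-value enum: exactly one admissible value. A plausible rendering (whether a default is also emitted depends on generator flags):

    from typing import Literal

    from pydantic import BaseModel


    class Const(BaseModel):
        # `const: "foo"` pins the field to a single literal value
        foo: Literal["foo"]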
diff -pruN 0.26.4-3/tests/data/jsonschema/custom_base_path.json 0.45.0-1/tests/data/jsonschema/custom_base_path.json
--- 0.26.4-3/tests/data/jsonschema/custom_base_path.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/custom_base_path.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,59 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "House",
+  "type": "object",
+  "customBasePath": "custom.models.Property",
+  "properties": {
+    "address": {
+      "type": "string"
+    },
+    "owner": {
+      "type": "object",
+      "customBasePath": "custom.models.Person",
+      "properties": {
+        "job": {
+          "type": "string"
+        },
+        "spouse": {
+          "type": "object",
+          "customBasePath": "custom.models.Person",
+          "properties": {
+            "job": {
+              "type": "string"
+            }
+          }
+        },
+        "children": {
+          "type": "array",
+          "items": {
+            "type": "object",
+            "customBasePath": "custom.models.Person",
+            "properties": {
+              "school": {
+                "type": "string"
+              },
+              "grade": {
+                "type": "number"
+              },
+              "pets": {
+                "type": "array",
+                "items": {
+                  "type": "object",
+                  "customBasePath": "custom.models.Animal",
+                  "properties": {
+                    "name": {
+                      "type": "string"
+                    }
+                  }
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+  },
+  "required": [
+    "address"
+  ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/custom_type_path.json 0.45.0-1/tests/data/jsonschema/custom_type_path.json
--- 0.26.4-3/tests/data/jsonschema/custom_type_path.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/custom_type_path.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,37 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Person",
+  "type": "object",
+  "properties": {
+    "firstName": {
+      "type": "string",
+      "description": "The person's first name.",
+      "customTypePath": "custom.TitleString"
+    },
+    "lastName": {
+      "type": "string",
+      "description": "The person's last name.",
+      "customTypePath": "custom.special.UpperString"
+    },
+    "age": {
+      "description": "Age in years which must be equal to or greater than zero.",
+      "type": "integer",
+      "minimum": 0,
+      "customTypePath": "custom.special.numbers.Age"
+    },
+    "friends": {
+      "type": "array",
+      "customTypePath": "custom.collection.array.Friends"
+    },
+    "comment": {
+      "type": "null",
+      "customTypePath": "custom.MultipleLineString"
+    }
+  },
+  "definitions": {
+    "RootedCustomType": {
+      "type": "string",
+      "customTypePath": "custom.SpecialString"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/datetime.json 0.45.0-1/tests/data/jsonschema/datetime.json
--- 0.26.4-3/tests/data/jsonschema/datetime.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/datetime.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "User": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string",
+          "example": "ken"
+        },
+        "birthDay": {
+          "type": "string",
+          "format": "date-time",
+          "example": "2016-08-29T09:12:33.001Z"
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/discriminator_literals.json 0.45.0-1/tests/data/jsonschema/discriminator_literals.json
--- 0.26.4-3/tests/data/jsonschema/discriminator_literals.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/discriminator_literals.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,62 @@
+{
+  "$defs": {
+    "Type1": {
+      "properties": {
+        "type_": {
+          "const": "a",
+          "default": "a",
+          "title": "Type "
+        }
+      },
+      "title": "Type1",
+      "type": "object"
+    },
+    "Type2": {
+      "properties": {
+        "type_": {
+          "const": "b",
+          "default": "b",
+          "title": "Type "
+        }
+      },
+      "title": "Type2",
+      "type": "object"
+    },
+    "UnrelatedType": {
+      "properties": {
+        "info": {
+          "default": "Unrelated type, not involved in the discriminated union",
+          "title": "A way to check for side effects",
+          "type": "string"
+        }
+      },
+      "title": "UnrelatedType",
+      "type": "object"
+    }
+  },
+  "properties": {
+    "inner": {
+      "discriminator": {
+        "mapping": {
+          "a": "#/$defs/Type1",
+          "b": "#/$defs/Type2"
+        },
+        "propertyName": "type_"
+      },
+      "oneOf": [
+        {
+          "$ref": "#/$defs/Type1"
+        },
+        {
+          "$ref": "#/$defs/Type2"
+        }
+      ],
+      "title": "Inner"
+    }
+  },
+  "required": [
+    "inner"
+  ],
+  "title": "Response",
+  "type": "object"
+}
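The `discriminator` block with a `mapping` is the classic tagged-union case; `UnrelatedType` exists only to check that models outside the union are untouched. A sketch of plausible pydantic v2 output:

    from typing import Literal, Union

    from pydantic import BaseModel, Field


    class Type1(BaseModel):
        type_: Literal["a"] = Field("a", title="Type ")


    class Type2(BaseModel):
        type_: Literal["b"] = Field("b", title="Type ")


    class Response(BaseModel):
        # the mapping becomes a tagged union keyed on type_
        inner: Union[Type1, Type2] = Field(..., discriminator="type_", title="Inner")

With this shape, `Response.model_validate({"inner": {"type_": "b"}})` dispatches straight to `Type2` instead of trying each branch in turn.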
diff -pruN 0.26.4-3/tests/data/jsonschema/discriminator_no_mapping.json 0.45.0-1/tests/data/jsonschema/discriminator_no_mapping.json
--- 0.26.4-3/tests/data/jsonschema/discriminator_no_mapping.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/discriminator_no_mapping.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,47 @@
+{
+  "$defs": {
+    "Cat": {
+      "properties": {
+        "pet_type": {
+          "const": "cat"
+        }
+      },
+      "required": [
+        "pet_type"
+      ],
+      "type": "object"
+    },
+    "Dog": {
+      "properties": {
+        "pet_type": {
+          "const": "dog"
+        }
+      },
+      "required": [
+        "pet_type"
+      ],
+      "type": "object"
+    }
+  },
+  "properties": {
+    "pet": {
+      "discriminator": {
+        "propertyName": "pet_type"
+      },
+      "oneOf": [
+        {
+          "$ref": "#/$defs/Cat"
+        },
+        {
+          "$ref": "#/$defs/Dog"
+        }
+      ],
+      "title": "Pet"
+    }
+  },
+  "required": [
+    "pet"
+  ],
+  "title": "Animal",
+  "type": "object"
+} 
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/discriminator_with_external_reference/inner_folder/artificial_folder/type-1.json 0.45.0-1/tests/data/jsonschema/discriminator_with_external_reference/inner_folder/artificial_folder/type-1.json
--- 0.26.4-3/tests/data/jsonschema/discriminator_with_external_reference/inner_folder/artificial_folder/type-1.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/discriminator_with_external_reference/inner_folder/artificial_folder/type-1.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+{
+  "properties": {
+    "type_": {
+      "const": "a",
+      "default": "a",
+      "title": "Type "
+    }
+  },
+  "title": "Type1",
+  "type": "object"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/discriminator_with_external_reference/inner_folder/schema.json 0.45.0-1/tests/data/jsonschema/discriminator_with_external_reference/inner_folder/schema.json
--- 0.26.4-3/tests/data/jsonschema/discriminator_with_external_reference/inner_folder/schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/discriminator_with_external_reference/inner_folder/schema.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,52 @@
+{
+  "$def": {
+    "Type3": {
+      "properties": {
+        "type_": {
+          "const": "c",
+          "default": "c",
+          "title": "Type "
+        }
+      },
+      "title": "Type3",
+      "type": "object"
+    }
+  },
+  "properties": {
+    "inner": {
+      "discriminator": {
+        "mapping": {
+          "a": "./artificial_folder/type-1.json",
+          "b": "./type-2.json",
+          "c": "#/$def/Type3",
+          "d": "../type-4.json",
+          "e": "../subfolder/type-5.json"
+        },
+        "propertyName": "type_"
+      },
+      "oneOf": [
+        {
+          "$ref": "./artificial_folder/type-1.json"
+        },
+        {
+          "$ref": "./type-2.json"
+        },
+        {
+          "$ref": "#/$def/Type3"
+        },
+        {
+          "$ref": "../type-4.json"
+        },
+        {
+          "$ref": "../subfolder/type-5.json"
+        }
+      ],
+      "title": "Inner"
+    }
+  },
+  "required": [
+    "inner"
+  ],
+  "title": "Response",
+  "type": "object"
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/discriminator_with_external_reference/inner_folder/type-2.json 0.45.0-1/tests/data/jsonschema/discriminator_with_external_reference/inner_folder/type-2.json
--- 0.26.4-3/tests/data/jsonschema/discriminator_with_external_reference/inner_folder/type-2.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/discriminator_with_external_reference/inner_folder/type-2.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+{
+  "properties": {
+    "type_": {
+      "const": "b",
+      "default": "b",
+      "title": "Type "
+    },
+    "ref_type": {
+      "$ref": "./artificial_folder/type-1.json",
+      "description": "A referenced type."
+    }
+  },
+  "title": "Type2",
+  "type": "object"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/discriminator_with_external_reference/subfolder/type-5.json 0.45.0-1/tests/data/jsonschema/discriminator_with_external_reference/subfolder/type-5.json
--- 0.26.4-3/tests/data/jsonschema/discriminator_with_external_reference/subfolder/type-5.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/discriminator_with_external_reference/subfolder/type-5.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+{
+  "properties": {
+    "type_": {
+      "const": "e",
+      "default": "e",
+      "title": "Type "
+    }
+  },
+  "title": "Type5",
+  "type": "object"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/discriminator_with_external_reference/type-4.json 0.45.0-1/tests/data/jsonschema/discriminator_with_external_reference/type-4.json
--- 0.26.4-3/tests/data/jsonschema/discriminator_with_external_reference/type-4.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/discriminator_with_external_reference/type-4.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+{
+  "properties": {
+    "type_": {
+      "const": "d",
+      "default": "d",
+      "title": "Type "
+    }
+  },
+  "title": "Type4",
+  "type": "object"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/discriminator_with_meta_msgspec.json 0.45.0-1/tests/data/jsonschema/discriminator_with_meta_msgspec.json
--- 0.26.4-3/tests/data/jsonschema/discriminator_with_meta_msgspec.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/discriminator_with_meta_msgspec.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,50 @@
+{
+  "$defs": {
+    "SystemMessage": {
+      "type": "object",
+      "properties": {
+        "role": {
+          "type": "string",
+          "const": "system",
+          "default": "system",
+          "title": "Message Role"
+        },
+        "content": {
+          "type": "string"
+        }
+      },
+      "required": ["role", "content"]
+    },
+    "UserMessage": {
+      "type": "object",
+      "properties": {
+        "role": {
+          "type": "string",
+          "const": "user",
+          "default": "user",
+          "title": "Message Role"
+        },
+        "content": {
+          "type": "string"
+        }
+      },
+      "required": ["role", "content"]
+    }
+  },
+  "type": "object",
+  "properties": {
+    "message": {
+      "discriminator": {
+        "propertyName": "role",
+        "mapping": {
+          "system": "#/$defs/SystemMessage",
+          "user": "#/$defs/UserMessage"
+        }
+      },
+      "oneOf": [
+        {"$ref": "#/$defs/SystemMessage"},
+        {"$ref": "#/$defs/UserMessage"}
+      ]
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/discriminator_with_type_string.json 0.45.0-1/tests/data/jsonschema/discriminator_with_type_string.json
--- 0.26.4-3/tests/data/jsonschema/discriminator_with_type_string.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/discriminator_with_type_string.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,48 @@
+{
+  "$defs": {
+    "SystemMessage": {
+      "type": "object",
+      "properties": {
+        "role": {
+          "type": "string",
+          "const": "system",
+          "default": "system"
+        },
+        "content": {
+          "type": "string"
+        }
+      },
+      "required": ["role", "content"]
+    },
+    "UserMessage": {
+      "type": "object",
+      "properties": {
+        "role": {
+          "type": "string",
+          "const": "user",
+          "default": "user"
+        },
+        "content": {
+          "type": "string"
+        }
+      },
+      "required": ["role", "content"]
+    }
+  },
+  "type": "object",
+  "properties": {
+    "message": {
+      "discriminator": {
+        "propertyName": "role",
+        "mapping": {
+          "system": "#/$defs/SystemMessage",
+          "user": "#/$defs/UserMessage"
+        }
+      },
+      "oneOf": [
+        {"$ref": "#/$defs/SystemMessage"},
+        {"$ref": "#/$defs/UserMessage"}
+      ]
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/duplicate_enum.json 0.45.0-1/tests/data/jsonschema/duplicate_enum.json
--- 0.26.4-3/tests/data/jsonschema/duplicate_enum.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/duplicate_enum.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,51 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema",
+  "title": "User",
+  "type": "object",
+  "properties": {
+    "name": {
+      "type": "string"
+    },
+    "animal": {
+      "type": "string",
+      "enum": [
+        "dog",
+        "cat",
+        "snake"
+      ],
+      "default": "dog"
+    },
+    "pet": {
+      "type": "string",
+      "enum": [
+        "dog",
+        "cat",
+        "snake"
+      ],
+      "default": "cat"
+    },
+    "redistribute": {
+      "type": "array",
+      "items": {
+        "type": "string",
+        "enum": [
+          "static",
+          "connected"
+        ]
+      }
+    }
+  },
+  "definitions": {
+    "redistribute": {
+      "type": "array",
+      "items": {
+        "type": "string",
+        "enum": [
+          "static",
+          "connected"
+        ]
+      },
+      "description": "Redistribute type for routes."
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/duplicate_field_constraints/common.yaml 0.45.0-1/tests/data/jsonschema/duplicate_field_constraints/common.yaml
--- 0.26.4-3/tests/data/jsonschema/duplicate_field_constraints/common.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/duplicate_field_constraints/common.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+---
+$schema: https://json-schema.org/draft/2020-12/schema
+$id: common.yaml
+definitions:
+  ulid:
+    type: string
+    pattern: '[0-9ABCDEFGHJKMNPQRSTVWXYZ]{26,26}'
diff -pruN 0.26.4-3/tests/data/jsonschema/duplicate_field_constraints/test.yaml 0.45.0-1/tests/data/jsonschema/duplicate_field_constraints/test.yaml
--- 0.26.4-3/tests/data/jsonschema/duplicate_field_constraints/test.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/duplicate_field_constraints/test.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+---
+$schema: https://json-schema.org/draft/2020-12/schema
+$id: test.yaml
+title: test
+required:
+  - uid
+properties:
+  uid:
+    description: ulid of this object
+    $ref: ./common.yaml#/definitions/ulid
diff -pruN 0.26.4-3/tests/data/jsonschema/duplicate_name/bar.json 0.45.0-1/tests/data/jsonschema/duplicate_name/bar.json
--- 0.26.4-3/tests/data/jsonschema/duplicate_name/bar.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/duplicate_name/bar.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+{
+  "$id": "bar.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Bar",
+  "type": "object",
+  "definitions": {
+    "logLevels": {
+      "description": "Supported logging levels",
+      "type": "string"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/duplicate_name/foo.json 0.45.0-1/tests/data/jsonschema/duplicate_name/foo.json
--- 0.26.4-3/tests/data/jsonschema/duplicate_name/foo.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/duplicate_name/foo.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+{
+  "$id": "foo.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Foo",
+  "type": "object",
+  "definitions": {
+    "logLevels": {
+      "description": "Supported logging levels",
+      "type": "string"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/duration.json 0.45.0-1/tests/data/jsonschema/duration.json
--- 0.26.4-3/tests/data/jsonschema/duration.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/duration.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "Test": {
+      "type": "object",
+      "properties": {
+        "s_duration": {
+          "type": "string",
+          "format": "duration",
+          "example": "PT2H33M3S"
+        }
+      }
+    }
+  }
+}
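`format: duration` plausibly maps to `datetime.timedelta`, whose pydantic v2 validator accepts ISO 8601 strings like the `PT2H33M3S` example above:

    from datetime import timedelta
    from typing import Optional

    from pydantic import BaseModel


    class Test(BaseModel):
        # ISO 8601 duration strings parse into timedelta
        s_duration: Optional[timedelta] = None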
diff -pruN 0.26.4-3/tests/data/jsonschema/empty_items_array.json 0.45.0-1/tests/data/jsonschema/empty_items_array.json
--- 0.26.4-3/tests/data/jsonschema/empty_items_array.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/empty_items_array.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "properties": {
+    "datum_kwargs": {
+      "additionalProperties": {
+        "items": {},
+        "type": "array"
+      },
+      "type": "object"
+    },
+    "simple_list": {
+      "items": {},
+      "type": "array"
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/encoding_test.json 0.45.0-1/tests/data/jsonschema/encoding_test.json
--- 0.26.4-3/tests/data/jsonschema/encoding_test.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/encoding_test.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "日本語Model",
+  "description": "モデルの説明文",
+  "type": "object",
+  "properties": {
+    "名前": {
+      "type": "string",
+      "description": "ユーザー名"
+    },
+    "年齢": {
+      "type": "integer"
+    }
+  }
+}
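This fixture checks that non-ASCII titles, property names, and descriptions survive generation. Python 3 identifiers may legally contain the original characters, but a generator can also romanize field names and keep the originals as aliases; a sketch of the alias-based variant (the English field names here are hypothetical):

    from typing import Optional

    from pydantic import BaseModel, Field


    class Model(BaseModel):  # from the title "日本語Model"
        name: Optional[str] = Field(None, alias="名前", description="ユーザー名")
        age: Optional[int] = Field(None, alias="年齢")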
diff -pruN 0.26.4-3/tests/data/jsonschema/enum_in_root/enum_in_root.json 0.45.0-1/tests/data/jsonschema/enum_in_root/enum_in_root.json
--- 0.26.4-3/tests/data/jsonschema/enum_in_root/enum_in_root.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/enum_in_root/enum_in_root.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+  "type": "object",
+  "properties": {
+    "orderReference": {
+      "type": "string",
+      "examples": [
+        "27378669"
+      ],
+      "description": "Reference number of the order"
+    },
+    "brand": {
+       "$ref": "./schema.json#/$defs/brand",
+      "$id": "#root/brand"
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/enum_in_root/schema.json 0.45.0-1/tests/data/jsonschema/enum_in_root/schema.json
--- 0.26.4-3/tests/data/jsonschema/enum_in_root/schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/enum_in_root/schema.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+  "$defs": {
+    "brand": {
+      "type": "string",
+      "enum": [
+        "OPUS",
+        "someday"
+      ],
+      "description": "purchased brand"
+    }
+  }
+}
+
diff -pruN 0.26.4-3/tests/data/jsonschema/enum_object_values.json 0.45.0-1/tests/data/jsonschema/enum_object_values.json
--- 0.26.4-3/tests/data/jsonschema/enum_object_values.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/enum_object_values.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,37 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "EnumObjectTest",
+  "type": "object",
+  "properties": {
+    "country_with_title": {
+      "enum": [
+        {"title": "USA", "code": "US"},
+        {"title": "CANADA", "code": "CA"}
+      ]
+    },
+    "country_with_name": {
+      "enum": [
+        {"name": "USA", "code": "US"},
+        {"name": "CANADA", "code": "CA"}
+      ]
+    },
+    "country_with_const": {
+      "enum": [
+        {"const": "us_value"},
+        {"const": "ca_value"}
+      ]
+    },
+    "country_no_identifier": {
+      "enum": [
+        {"code": "US"},
+        {"code": "CA"}
+      ]
+    },
+    "country_duplicate_title": {
+      "enum": [
+        {"title": "SAME", "code": "US"},
+        {"title": "SAME", "code": "CA"}
+      ]
+    }
+  }
+}
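Object-valued enums have no obvious member names, which is exactly what the `title`/`name`/`const`/no-identifier variants probe. Enum values need not be hashable, so dict values are legal Python; a speculative sketch for the `title` case (member-name derivation is the assumption here):

    from enum import Enum


    class CountryWithTitle(Enum):
        # member names assumed to come from each object's "title" key
        USA = {"title": "USA", "code": "US"}
        CANADA = {"title": "CANADA", "code": "CA"}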
diff -pruN 0.26.4-3/tests/data/jsonschema/external_child.json 0.45.0-1/tests/data/jsonschema/external_child.json
--- 0.26.4-3/tests/data/jsonschema/external_child.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/external_child.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+{
+  "Json": {
+    "type": "object",
+    "properties": {
+      "firstName": {
+        "type": "string"
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/external_child.yaml 0.45.0-1/tests/data/jsonschema/external_child.yaml
--- 0.26.4-3/tests/data/jsonschema/external_child.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/external_child.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+Yaml:
+  type: "object"
+  properties:
+    firstName:
+      type: "string"
+
+
diff -pruN 0.26.4-3/tests/data/jsonschema/external_child_root.json 0.45.0-1/tests/data/jsonschema/external_child_root.json
--- 0.26.4-3/tests/data/jsonschema/external_child_root.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/external_child_root.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+{
+  "$schema": "http://json-schema.org/schema#",
+  "type": "object",
+  "title": "Object",
+  "properties": {
+    "somefield": {
+      "type": "integer"
+    }
+  },
+  "required": [
+    "metadata"
+  ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/external_collapse/child.json 0.45.0-1/tests/data/jsonschema/external_collapse/child.json
--- 0.26.4-3/tests/data/jsonschema/external_collapse/child.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/external_collapse/child.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,4 @@
+{
+    "$schema": "http://json-schema.org/draft/2019-09/schema#",
+    "type": "string"
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/external_collapse/parent.json 0.45.0-1/tests/data/jsonschema/external_collapse/parent.json
--- 0.26.4-3/tests/data/jsonschema/external_collapse/parent.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/external_collapse/parent.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,6 @@
+{
+    "$schema": "http://json-schema.org/draft/2019-09/schema#",
+    "properties": {
+        "item":  {"$ref": "child.json"}
+    }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/external_definitions.json 0.45.0-1/tests/data/jsonschema/external_definitions.json
--- 0.26.4-3/tests/data/jsonschema/external_definitions.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/external_definitions.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+{
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "elegantName": {
+        "type": "string",
+        "minLength": 3
+    }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/external_definitions_root.json 0.45.0-1/tests/data/jsonschema/external_definitions_root.json
--- 0.26.4-3/tests/data/jsonschema/external_definitions_root.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/external_definitions_root.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+{
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "title": "Person",
+    "type": "object",
+    "additionalProperties": false,
+    "properties": {
+        "name": {
+            "$ref": "external_definitions.json#/elegantName"
+        }
+    },
+    "required": [
+        "name"
+    ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/external_files_in_directory/definitions/drink/coffee.json 0.45.0-1/tests/data/jsonschema/external_files_in_directory/definitions/drink/coffee.json
--- 0.26.4-3/tests/data/jsonschema/external_files_in_directory/definitions/drink/coffee.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/external_files_in_directory/definitions/drink/coffee.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+{
+  "$id": "https://example.com/external_files_in_directory/definitions/drink/coffee.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Coffee",
+  "type": "string",
+    "enum": [
+        "Black",
+        "Espresso"
+    ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/external_files_in_directory/definitions/drink/tea.json 0.45.0-1/tests/data/jsonschema/external_files_in_directory/definitions/drink/tea.json
--- 0.26.4-3/tests/data/jsonschema/external_files_in_directory/definitions/drink/tea.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/external_files_in_directory/definitions/drink/tea.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+{
+  "$id": "https://example.com/external_files_in_directory/definitions/drinnk/tea.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Tea",
+  "type": "string",
+    "enum": [
+        "Oolong",
+        "Green"
+    ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/external_files_in_directory/definitions/food.json 0.45.0-1/tests/data/jsonschema/external_files_in_directory/definitions/food.json
--- 0.26.4-3/tests/data/jsonschema/external_files_in_directory/definitions/food.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/external_files_in_directory/definitions/food.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+{
+  "$id": "https://example.com/external_files_in_directory/definitions/drink/food.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "noodle": {
+      "type": "string",
+      "enum": [
+        "ramen",
+        "spaghetti"
+      ]
+    },
+    "soup": {
+      "type": "string",
+      "enum": [
+        "bean",
+        "mushroom",
+        "tomato"
+      ]
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/external_files_in_directory/definitions/friends.json 0.45.0-1/tests/data/jsonschema/external_files_in_directory/definitions/friends.json
--- 0.26.4-3/tests/data/jsonschema/external_files_in_directory/definitions/friends.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/external_files_in_directory/definitions/friends.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,33 @@
+{
+  "$id": "https://example.com/external_files_in_directory/definitions/friends.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Friends",
+  "type": "array",
+  "items": {
+    "properties": {
+      "name": {
+        "type": "string",
+        "example": "John Doe"
+      },
+      "phone_number": {
+        "type": "string",
+        "example": "(555) 555-1234"
+      },
+     "food": {
+      "type": "array",
+      "items": [
+        {
+          "$ref": "food.json#/definitions/noodle"
+        },
+        {
+          "$ref": "food.json#/definitions/soup"
+        }
+      ]
+    }
+    },
+    "additionalProperties": true,
+    "required": [
+      "name"
+    ]
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/external_files_in_directory/definitions/machine/robot.json 0.45.0-1/tests/data/jsonschema/external_files_in_directory/definitions/machine/robot.json
--- 0.26.4-3/tests/data/jsonschema/external_files_in_directory/definitions/machine/robot.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/external_files_in_directory/definitions/machine/robot.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+{
+  "$id": "https://example.com/external_files_in_directory/definitions/machine/robot.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Robot",
+  "type": "object",
+  "allOf": [{ "$ref": "../relative/animal/pet/pet.json"}],
+  "properties": {
+    "friends": {
+     "$ref": "../../person.json"
+    },
+    "drink": {
+      "$ref": "../drink/coffee.json#"
+    },
+    "food": {
+      "$ref": "../food.json#/definitions/noodle"
+    },
+    "pet": {
+      "$ref": "../relative/animal/pet/pet.json"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/external_files_in_directory/definitions/relative/animal/fur.json 0.45.0-1/tests/data/jsonschema/external_files_in_directory/definitions/relative/animal/fur.json
--- 0.26.4-3/tests/data/jsonschema/external_files_in_directory/definitions/relative/animal/fur.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/external_files_in_directory/definitions/relative/animal/fur.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+{
+  "$id": "https://example.com/external_files_in_directory/definitions/relative/animal/fur.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Fur",
+  "type": "string",
+    "enum": [
+        "Short hair",
+        "Long hair"
+    ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/external_files_in_directory/definitions/relative/animal/pet/pet.json 0.45.0-1/tests/data/jsonschema/external_files_in_directory/definitions/relative/animal/pet/pet.json
--- 0.26.4-3/tests/data/jsonschema/external_files_in_directory/definitions/relative/animal/pet/pet.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/external_files_in_directory/definitions/relative/animal/pet/pet.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+{
+  "$id": "https://example.com/external_files_in_directory/definitions/relative/animal/pet/pet.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Pet",
+  "type": "object",
+  "properties": {
+    "name": {
+      "type": "string"
+    },
+    "age": {
+      "type": "integer"
+    },
+    "fur": {
+      "$ref": "../fur.json"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/external_files_in_directory/person.json 0.45.0-1/tests/data/jsonschema/external_files_in_directory/person.json
--- 0.26.4-3/tests/data/jsonschema/external_files_in_directory/person.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/external_files_in_directory/person.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,64 @@
+{
+  "$id": "https://example.com/external_files_in_directory/person.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Person",
+  "type": "object",
+  "properties": {
+    "first_name": {
+      "type": "string",
+      "description": "The person's first name."
+    },
+    "last_name": {
+      "type": "string",
+      "description": "The person's last name."
+    },
+    "age": {
+      "description": "Age in years.",
+      "type": "integer",
+      "minimum": 0
+    },
+    "pets": {
+      "type": "array",
+      "items": [
+        {
+          "$ref": "definitions/relative/animal/pet/pet.json#"
+        }
+      ]
+    },
+    "friends": {
+      "$ref": "definitions/friends.json#"
+    },
+    "robot": {
+      "$ref": "./definitions/machine/robot.json"
+    },
+    "comment": {
+      "type": "null"
+    },
+    "drink": {
+      "type": "array",
+      "items": [
+        {
+          "$ref": "definitions/drink/coffee.json"
+        },
+        {
+          "$ref": "definitions/drink/tea.json#/"
+        }
+      ]
+    },
+    "food": {
+      "type": "array",
+      "items": [
+        {
+          "$ref": "definitions/food.json#/definitions/noodle"
+        },
+        {
+          "$ref": "definitions/food.json#/definitions/soup"
+        }
+      ]
+    }
+  },
+  "required": [
+      "first_name",
+      "last_name"
+  ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/external_parent.json 0.45.0-1/tests/data/jsonschema/external_parent.json
--- 0.26.4-3/tests/data/jsonschema/external_parent.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/external_parent.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,51 @@
+{
+  "type": "object",
+  "properties": {
+    "firstName": {
+      "type": "string"
+    },
+    "childYaml": {
+      "$ref": "external_child.yaml#/Yaml"
+    },
+    "childJson": {
+      "$ref": "external_child.json#/Json"
+    },
+    "childDuplicateJson": {
+      "$ref": "external_child.json#/Json"
+    },
+    "childLocal": {
+      "$ref": "#Local"
+    },
+    "nested": {
+        "items": [
+            {
+              "childLocal": {
+                "$ref": "#Local"
+              }
+            }
+        ]
+    }
+  },
+  "items": {
+    "childLocal": {
+           "$ref": "#Local"
+    }
+  },
+  "additionalProperties": {
+        "childLocal": {
+           "$ref": "#Local"
+    }
+  },
+  "anyOf": [
+    {    "childLocal": {
+           "$ref": "#Local"
+        }
+    }
+  ],
+  "allOf": [
+    {    "childLocal": {
+           "$ref": "#Local"
+        }
+    }
+  ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/external_parent_root.json 0.45.0-1/tests/data/jsonschema/external_parent_root.json
--- 0.26.4-3/tests/data/jsonschema/external_parent_root.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/external_parent_root.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+{
+  "$schema": "http://json-schema.org/schema#",
+  "type": "object",
+  "title": "Object",
+  "properties": {
+    "metadata": {
+      "$ref": "external_child_root.json#/"
+    }
+  },
+  "required": [
+    "metadata"
+  ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/external_reference/other/ref1.json 0.45.0-1/tests/data/jsonschema/external_reference/other/ref1.json
--- 0.26.4-3/tests/data/jsonschema/external_reference/other/ref1.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/external_reference/other/ref1.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,9 @@
+{
+    "$schema": "https://json-schema.org/draft/2020-12/schema",
+    "anyOf": [
+        {
+            "$ref": "ref2.json#/"
+        },
+        {"type": "null"}
+    ]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/external_reference/other/ref2.json 0.45.0-1/tests/data/jsonschema/external_reference/other/ref2.json
--- 0.26.4-3/tests/data/jsonschema/external_reference/other/ref2.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/external_reference/other/ref2.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+{
+    "$schema": "https://json-schema.org/draft/2020-12/schema",
+    "title": "Other",
+    "properties": {
+        "key": {"type": "string"}
+    }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/external_reference/ref0.json 0.45.0-1/tests/data/jsonschema/external_reference/ref0.json
--- 0.26.4-3/tests/data/jsonschema/external_reference/ref0.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/external_reference/ref0.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+{
+    "$schema": "https://json-schema.org/draft/2020-12/schema",
+    "properties": {
+        "ref1": {
+            "$ref": "ref1.json#/"
+        },
+        "other_ref1": {
+            "$ref": "other/ref1.json#/"
+        }
+    }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/external_reference/ref1.json 0.45.0-1/tests/data/jsonschema/external_reference/ref1.json
--- 0.26.4-3/tests/data/jsonschema/external_reference/ref1.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/external_reference/ref1.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,9 @@
+{
+    "$schema": "https://json-schema.org/draft/2020-12/schema",
+    "anyOf": [
+        {
+            "$ref": "ref2.json#/"
+        },
+        {"type": "null"}
+    ]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/external_reference/ref2.json 0.45.0-1/tests/data/jsonschema/external_reference/ref2.json
--- 0.26.4-3/tests/data/jsonschema/external_reference/ref2.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/external_reference/ref2.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,6 @@
+{
+    "$schema": "https://json-schema.org/draft/2020-12/schema",
+    "properties": {
+        "key": {"type": "string"}
+    }
+}
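The external_reference fixtures form a two-level chain: ref0.json pulls in both ./ref1.json and other/ref1.json, each of which wraps a ref2.json in an anyOf with null. A plausible shape of the generated models, assuming pydantic output (a sketch, not the checked-in expectation):

from typing import Optional
from pydantic import BaseModel

class Ref2(BaseModel):      # ref2.json is untitled, so it is named from the file
    key: Optional[str] = None

class Other(BaseModel):     # other/ref2.json carries "title": "Other"
    key: Optional[str] = None

class Ref0(BaseModel):
    # anyOf [ref2.json, null] collapses to an Optional reference
    ref1: Optional[Ref2] = None
    other_ref1: Optional[Other] = None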
diff -pruN 0.26.4-3/tests/data/jsonschema/extra_data_msgspec.json 0.45.0-1/tests/data/jsonschema/extra_data_msgspec.json
--- 0.26.4-3/tests/data/jsonschema/extra_data_msgspec.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/extra_data_msgspec.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+{
+    "#all#": {
+        "base_class_kwargs": {
+            "omit_defaults": true
+        }
+    }
+}
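extra_data_msgspec.json is not a schema; it is template data. The special "#all#" key applies its values to every generated model, and base_class_kwargs is forwarded into the class statement, so each msgspec Struct should be declared with omit_defaults=True. A hedged sketch of the intended invocation (the input path is illustrative):

import subprocess

# Feed the template-data file alongside a schema; "#all#" makes the kwargs
# apply to every emitted Struct. "schema.json" is a placeholder input.
subprocess.run(
    [
        "datamodel-codegen",
        "--input", "schema.json",
        "--input-file-type", "jsonschema",
        "--output-model-type", "msgspec.Struct",
        "--extra-template-data", "extra_data_msgspec.json",
        "--output", "model.py",
    ],
    check=True,
)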
diff -pruN 0.26.4-3/tests/data/jsonschema/extra_fields.json 0.45.0-1/tests/data/jsonschema/extra_fields.json
--- 0.26.4-3/tests/data/jsonschema/extra_fields.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/extra_fields.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,36 @@
+{
+  "title": "Test",
+  "type": "object",
+  "required": [
+    "foo"
+  ],
+  "properties": {
+    "foo": {
+      "type": "object",
+      "properties": {
+        "x": {
+          "type": "integer"
+        }
+      },
+      "additionalProperties": true
+    },
+    "bar": {
+      "type": "object",
+      "properties": {
+        "y": {
+          "type": "integer"
+        }
+      },
+      "additionalProperties": false
+    },
+    "baz": {
+      "type": "object",
+      "properties": {
+        "z": {
+          "type": "integer"
+        }
+      }
+    }
+  },
+  "additionalProperties": false
+}
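extra_fields.json covers the three additionalProperties cases: true, false, and omitted. For pydantic v2 output these plausibly map onto model_config as below (a sketch, assuming default generator options):

from typing import Optional
from pydantic import BaseModel, ConfigDict

class Foo(BaseModel):
    model_config = ConfigDict(extra="allow")    # "additionalProperties": true
    x: Optional[int] = None

class Bar(BaseModel):
    model_config = ConfigDict(extra="forbid")   # "additionalProperties": false
    y: Optional[int] = None

class Baz(BaseModel):                           # additionalProperties omitted
    z: Optional[int] = None

class Test(BaseModel):
    model_config = ConfigDict(extra="forbid")
    foo: Foo
    bar: Optional[Bar] = None
    baz: Optional[Baz] = None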
diff -pruN 0.26.4-3/tests/data/jsonschema/extras.json 0.45.0-1/tests/data/jsonschema/extras.json
--- 0.26.4-3/tests/data/jsonschema/extras.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/extras.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,33 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Extras",
+  "type": "object",
+  "properties": {
+    "name": {
+      "type": "string",
+      "description": "normal key",
+      "key1": 123,
+      "key2": 456,
+      "$exclude": 123,
+      "invalid-key-1": "abc",
+      "-invalid+key_2": "efg",
+      "$comment": "comment",
+      "$id": "#name",
+      "register": "hij",
+      "schema": "klm",
+      "x-repr": true,
+      "x-abc": true,
+      "example": "example",
+      "readOnly": true
+    },
+    "age": {
+      "type": "integer",
+      "example": 12,
+      "writeOnly": true,
+      "examples": [
+        13,
+        20
+      ]
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/extras_in_oneof.json 0.45.0-1/tests/data/jsonschema/extras_in_oneof.json
--- 0.26.4-3/tests/data/jsonschema/extras_in_oneof.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/extras_in_oneof.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,49 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema#",
+    "id": "extras_in_oneof",
+    "title": "ExtrasInOneOf",
+    "type": "object",
+    "properties": {
+        "simple_prop": {"type": "string", "x-custom": "simple_value"},
+        "oneof_prop": {
+            "x-parent-custom": "parent_value",
+            "properties": {
+                "shared_prop": {"type": "string", "x-shared": "shared_value"}
+            },
+            "oneOf": [
+                {
+                    "title": "VariantA",
+                    "type": "object",
+                    "properties": {
+                        "variant_a_prop": {"type": "string", "x-variant": "variant_a_value"}
+                    }
+                },
+                {
+                    "title": "VariantB",
+                    "type": "object",
+                    "properties": {
+                        "variant_b_prop": {"type": "integer", "x-variant": "variant_b_value"}
+                    }
+                }
+            ]
+        },
+        "anyof_prop": {
+            "anyOf": [
+                {
+                    "title": "AnyVariantA",
+                    "type": "object",
+                    "properties": {
+                        "any_a_prop": {"type": "string", "x-any": "any_a_value"}
+                    }
+                },
+                {
+                    "title": "AnyVariantB",
+                    "type": "object",
+                    "properties": {
+                        "any_b_prop": {"type": "boolean", "x-any": "any_b_value"}
+                    }
+                }
+            ]
+        }
+    }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/field_has_same_name.json 0.45.0-1/tests/data/jsonschema/field_has_same_name.json
--- 0.26.4-3/tests/data/jsonschema/field_has_same_name.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/field_has_same_name.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+{
+  "title": "Test",
+  "type": "object",
+  "properties": {
+    "TestObject": {
+      "title": "TestObject",
+      "type": "object",
+      "properties": {
+        "test_string": {
+          "type": "string"
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/field_name_shadows_class_name.json 0.45.0-1/tests/data/jsonschema/field_name_shadows_class_name.json
--- 0.26.4-3/tests/data/jsonschema/field_name_shadows_class_name.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/field_name_shadows_class_name.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,30 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "Elem": {
+      "type": "object",
+      "properties": {
+        "temp": {
+          "type": "string"
+        }
+      },
+      "required": ["temp"]
+    }
+  },
+  "type": "object",
+  "properties": {
+    "Elem": {
+      "anyOf": [
+        {
+          "type": "array",
+          "items": {
+            "$ref": "#/definitions/Elem"
+          }
+        },
+        {
+          "type": "null"
+        }
+      ]
+    }
+  }
+}
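Here the property "Elem" shares its name with the definition it references, so a naive rendering would let the field shadow the class inside the model body. One plausible resolution uses a quoted forward reference so the annotation still resolves to the module-level class (a sketch; the checked-in expected output may rename instead):

from typing import List, Optional
from pydantic import BaseModel

class Elem(BaseModel):
    temp: str

class Model(BaseModel):
    # The string annotation is evaluated against module globals, so it finds
    # the Elem class rather than this identically named field.
    Elem: Optional[List["Elem"]] = None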
diff -pruN 0.26.4-3/tests/data/jsonschema/forwarding_reference/commons.json 0.45.0-1/tests/data/jsonschema/forwarding_reference/commons.json
--- 0.26.4-3/tests/data/jsonschema/forwarding_reference/commons.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/forwarding_reference/commons.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,6 @@
+{
+  "$defs": {
+    "customArray": { "type": "array" }
+  },
+  "$schema": "https://json-schema.org/draft/2020-12/schema"
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/forwarding_reference/forwarding.json 0.45.0-1/tests/data/jsonschema/forwarding_reference/forwarding.json
--- 0.26.4-3/tests/data/jsonschema/forwarding_reference/forwarding.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/forwarding_reference/forwarding.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,6 @@
+{
+  "$defs": {
+    "forwardingArray": {"$ref": "commons.json#/$defs/customArray"}
+  },
+  "$schema": "https://json-schema.org/draft/2020-12/schema"
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/forwarding_reference/schema.json 0.45.0-1/tests/data/jsonschema/forwarding_reference/schema.json
--- 0.26.4-3/tests/data/jsonschema/forwarding_reference/schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/forwarding_reference/schema.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,6 @@
+{
+  "$defs": {
+    "pets": {"$ref": "forwarding.json#/$defs/forwardingArray"}
+  },
+  "$schema": "https://json-schema.org/draft/2020-12/schema"
+}
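The forwarding_reference trio checks that a $ref whose target is itself only a $ref resolves all the way to the terminal definition. Conceptually the three files reduce to a chain of aliases (a sketch, not literal generator output):

from typing import Any, List

# commons.json: the terminal definition, an untyped array
CustomArray = List[Any]

# forwarding.json: a pure alias of the external definition
ForwardingArray = CustomArray

# schema.json: resolves through the forwarding alias
Pets = ForwardingArray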
diff -pruN 0.26.4-3/tests/data/jsonschema/has_default_value.json 0.45.0-1/tests/data/jsonschema/has_default_value.json
--- 0.26.4-3/tests/data/jsonschema/has_default_value.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/has_default_value.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,78 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Person",
+  "type": "object",
+  "definitions": {
+    "teamType": {
+      "description": "Person team",
+      "type": "string",
+      "enum": [
+        "Department",
+        "Division",
+        "BusinessUnit",
+        "Organization"
+      ],
+      "default": "Department"
+    },
+    "ID": {
+      "type": "string",
+      "default": "abc"
+    },
+    "Pet": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string"
+        }
+      },
+      "default": {
+        "name": "ken"
+      }
+    },
+    "Family": {
+      "type": "array",
+      "items": {
+        "$ref": "#/definitions/ID"
+      },
+      "default": [
+        "abc",
+        "efg"
+      ]
+    },
+    "FamilyPets": {
+      "type": "array",
+      "items": {
+        "$ref": "#/definitions/Pet"
+      },
+      "default": [
+        "taro",
+        "shiro"
+      ]
+    }
+  },
+  "properties": {
+    "id": {
+      "$ref": "#/definitions/ID"
+    },
+    "user": {
+      "$ref": "#/definitions/Pet"
+    },
+    "firstName": {
+      "type": "string",
+      "description": "The person's first name."
+    },
+    "team": {
+      "$ref": "#/definitions/teamType"
+    },
+    "anotherTeam": {
+      "$ref": "#/definitions/teamType",
+      "default": "Department"
+    },
+    "Family": {
+      "$ref": "#/definitions/Family"
+    },
+    "FamilyPets": {
+      "$ref": "#/definitions/FamilyPets"
+    }
+  }
+}
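has_default_value.json mixes defaults declared on the referenced definitions with defaults restated at the use site. The interesting cases, sketched for pydantic output (teamType is really an enum; it is shown as str here for brevity):

from typing import List, Optional
from pydantic import BaseModel

class Person(BaseModel):
    # default inherited from the referenced "#/definitions/ID"
    id: Optional[str] = "abc"
    # same referenced enum type, but the default is restated at the use site
    anotherTeam: Optional[str] = "Department"
    # array default copied verbatim from "#/definitions/Family"
    Family: Optional[List[str]] = ["abc", "efg"]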
diff -pruN 0.26.4-3/tests/data/jsonschema/hierarchical_aliases.json 0.45.0-1/tests/data/jsonschema/hierarchical_aliases.json
--- 0.26.4-3/tests/data/jsonschema/hierarchical_aliases.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/hierarchical_aliases.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,34 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "title": "Root",
+  "properties": {
+    "name": {
+      "type": "string"
+    },
+    "user": {
+      "type": "object",
+      "title": "User",
+      "properties": {
+        "name": {
+          "type": "string"
+        },
+        "id": {
+          "type": "integer"
+        }
+      }
+    },
+    "address": {
+      "type": "object",
+      "title": "Address",
+      "properties": {
+        "name": {
+          "type": "string"
+        },
+        "city": {
+          "type": "string"
+        }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/id.json 0.45.0-1/tests/data/jsonschema/id.json
--- 0.26.4-3/tests/data/jsonschema/id.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/id.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,23 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+
+  "definitions": {
+    "address": {
+      "$id": "#address",
+      "type": "object",
+      "properties": {
+        "street_address": { "type": "string" },
+        "city":           { "type": "string" },
+        "state":          { "type": "string" }
+      },
+      "required": ["street_address", "city", "state"]
+    }
+  },
+
+  "type": "object",
+
+  "properties": {
+    "billing_address": { "$ref": "#address" },
+    "shipping_address": { "$ref": "#address" }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/ids/ContactPoint.schema.json 0.45.0-1/tests/data/jsonschema/ids/ContactPoint.schema.json
--- 0.26.4-3/tests/data/jsonschema/ids/ContactPoint.schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/ids/ContactPoint.schema.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,29 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema",
+    "$id": "ContactPoint.schema.json",
+    "title": "ContactPoint",
+    "description": "A contact point—for example, a Customer Complaints department.",
+    "type": "object",
+    "required": [
+        "type",
+        "email"
+    ],
+    "properties": {
+        "type": {
+            "$ref": "type.schema.json",
+            "enum": [
+                "ContactPoint"
+            ]
+        },
+        "contactType": {
+            "type": "string"
+        },
+        "email": {
+            "type": "string",
+            "format": "email"
+        },
+        "telephone": {
+            "type": "string"
+        }
+    }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/ids/Organization.schema.json 0.45.0-1/tests/data/jsonschema/ids/Organization.schema.json
--- 0.26.4-3/tests/data/jsonschema/ids/Organization.schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/ids/Organization.schema.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,42 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema",
+    "$id": "Organization.schema.json",
+    "title": "Organization",
+    "description": "An organization such as a school, NGO, corporation, club, etc.",
+    "type": "object",
+    "required": [
+        "type",
+        "name"
+    ],
+    "properties": {
+        "id": {
+            "$ref": "id.schema.json"
+        },
+        "type": {
+            "$ref": "type.schema.json",
+            "enum": [
+                "Organization"
+            ]
+        },
+        "name": {
+            "$ref": "name.schema.json"
+        },
+        "contactPoint": {
+            "$ref": "ContactPoint.schema.json"
+        },
+        "sameAs": {
+            "$ref": "sameAs.schema.json"
+        },
+        "url": {
+            "$ref": "URI.schema.json"
+        }
+    },
+    "examples": [
+        {
+            "id": "https://ror.org/02a809t02",
+            "type": "Organization",
+            "name": "Vizzuality",
+            "sameAs": "https://ror.org/02a809t02"
+        }
+    ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/ids/URI.schema.json 0.45.0-1/tests/data/jsonschema/ids/URI.schema.json
--- 0.26.4-3/tests/data/jsonschema/ids/URI.schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/ids/URI.schema.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,8 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema",
+    "$id": "URI.schema.json",
+    "title": "URI",
+    "description": "String representing a URI.",
+    "type": "string",
+    "format": "uri"
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/ids/context.schema.json 0.45.0-1/tests/data/jsonschema/ids/context.schema.json
--- 0.26.4-3/tests/data/jsonschema/ids/context.schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/ids/context.schema.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema",
+    "$id": "context.schema.json",
+    "title": "context",
+    "description": "A URL that provides descriptions of this objects properties. TODO: Align with full JSON-LD context definition!",
+    "type": "string"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/ids/id.schema.json 0.45.0-1/tests/data/jsonschema/ids/id.schema.json
--- 0.26.4-3/tests/data/jsonschema/ids/id.schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/ids/id.schema.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema",
+    "$id": "id.schema.json",
+    "title": "id",
+    "description": "Identifier string of this object.",
+    "type": "string"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/ids/name.schema.json 0.45.0-1/tests/data/jsonschema/ids/name.schema.json
--- 0.26.4-3/tests/data/jsonschema/ids/name.schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/ids/name.schema.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema",
+    "$id": "name.schema.json",
+    "title": "name",
+    "description": "A descriptive (full) name of the entity. For example, a dataset called 'Snow depth in the Northern Hemisphere' or a person called 'Sarah L. Jones' or a place called 'The Empire States Building'. Use unique names for distinct entities whenever possible.",
+    "type": "string"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/ids/sameAs.schema.json 0.45.0-1/tests/data/jsonschema/ids/sameAs.schema.json
--- 0.26.4-3/tests/data/jsonschema/ids/sameAs.schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/ids/sameAs.schema.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema",
+    "$id": "sameAs.schema.json",
+    "title": "sameAs",
+    "description": "Use the sameAs property to indicate the most canonical URLs for the original in cases of the entity. For example this may be a link to the original metadata of a dataset, definition of a property, Person, Organization or Place.",
+    "$ref": "URI.schema.json"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/ids/schema.schema.json 0.45.0-1/tests/data/jsonschema/ids/schema.schema.json
--- 0.26.4-3/tests/data/jsonschema/ids/schema.schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/ids/schema.schema.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema",
+    "$id": "schema.schema.json",
+    "title": "schema",
+    "description": "URI of the JSON schema of this object.",
+    "type": "string"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/ids/type.schema.json 0.45.0-1/tests/data/jsonschema/ids/type.schema.json
--- 0.26.4-3/tests/data/jsonschema/ids/type.schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/ids/type.schema.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema",
+    "$id": "type.schema.json",
+    "type": "string",
+    "title": "type",
+    "description": "Type of this object."
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/ignore_pyproject_example.json 0.45.0-1/tests/data/jsonschema/ignore_pyproject_example.json
--- 0.26.4-3/tests/data/jsonschema/ignore_pyproject_example.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/ignore_pyproject_example.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,8 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "properties": {
+    "firstName": {"type": "string"},
+    "lastName": {"type": "string"}
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/imports_correct/schema.json 0.45.0-1/tests/data/jsonschema/imports_correct/schema.json
--- 0.26.4-3/tests/data/jsonschema/imports_correct/schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/imports_correct/schema.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+{
+  "properties": {
+    "inner": {
+      "discriminator": {
+        "mapping": {
+          "a": "./type_1.json",
+          "A": "./type_1.json"
+        },
+        "propertyName": "type_"
+      },
+      "oneOf": [
+        {
+          "$ref": "./type_1.json"
+        }
+      ],
+      "title": "Inner"
+    }
+  },
+  "required": [
+    "inner"
+  ],
+  "title": "Response",
+  "type": "object"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/imports_correct/type_1.json 0.45.0-1/tests/data/jsonschema/imports_correct/type_1.json
--- 0.26.4-3/tests/data/jsonschema/imports_correct/type_1.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/imports_correct/type_1.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+{
+  "properties": {
+    "type_": {
+      "default": "a",
+      "enum": ["a", "A"],
+
+      "type": "string",
+      "title": "Type"
+    }
+  },
+  "title": "Type1",
+  "type": "object"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/inheritance_forward_ref.json 0.45.0-1/tests/data/jsonschema/inheritance_forward_ref.json
--- 0.26.4-3/tests/data/jsonschema/inheritance_forward_ref.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/inheritance_forward_ref.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,100 @@
+{
+    "title": "PersonsBestFriend",
+    "description": "This is the main model.",
+    "type": "object",
+    "properties": {
+      "people": {
+        "title": "People",
+        "type": "array",
+        "items": {
+          "$ref": "#/definitions/Person"
+        }
+      },
+      "dogs": {
+        "title": "Dogs",
+        "type": "array",
+        "items": {
+          "$ref": "#/definitions/Dog"
+        }
+      },
+      "dog_base": {
+        "$ref": "#/definitions/DogBase"
+      },
+      "dog_relationships": {
+        "$ref": "#/definitions/DogRelationships"
+      },
+      "person_base": {
+        "$ref": "#/definitions/PersonBase"
+      },
+      "person_relationships": {
+        "$ref": "#/definitions/PersonRelationships"
+      }
+    },
+    "definitions": {
+      "Person": {
+        "title": "Person",
+        "allOf": [
+            {"$ref": "#/definitions/PersonBase"},
+            {"$ref": "#/definitions/PersonRelationships"}
+        ]
+      },
+      "Dog": {
+        "title": "Dog",
+        "allOf": [
+            {"$ref": "#/definitions/DogBase"},
+            {"$ref": "#/definitions/DogRelationships"}
+        ]
+      },
+      "DogBase": {
+        "title": "DogBase",
+        "type": "object",
+        "properties": {
+          "name": {
+            "title": "Name",
+            "type": "string"
+          },
+          "woof": {
+            "title": "Woof",
+            "default": true,
+            "type": "boolean"
+          }
+        }
+      },
+      "DogRelationships": {
+        "title": "DogRelationships",
+        "type": "object",
+        "properties": {
+          "people": {
+            "title": "People",
+            "type": "array",
+            "items": {
+              "$ref": "#/definitions/Person"
+            }
+          }
+        }
+      },
+      "PersonBase": {
+        "title": "PersonBase",
+        "type": "object",
+        "properties": {
+          "name": {
+            "title": "Name",
+            "type": "string"
+          }
+        }
+      },
+      "PersonRelationships": {
+        "title": "PersonRelationships",
+        "type": "object",
+        "properties": {
+          "people": {
+            "title": "People",
+            "type": "array",
+            "items": {
+              "$ref": "#/definitions/Person"
+            }
+          }
+        }
+      }
+    }
+  }
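inheritance_forward_ref.json builds Person and Dog from allOf pairs whose pieces refer back to Person, so the generated classes need both multiple inheritance and forward references. A plausible excerpt (sketch):

from __future__ import annotations

from typing import List, Optional
from pydantic import BaseModel

class PersonBase(BaseModel):
    name: Optional[str] = None

class PersonRelationships(BaseModel):
    # Person is defined below; postponed annotations keep this legal
    people: Optional[List[Person]] = None

class Person(PersonBase, PersonRelationships):
    pass

PersonRelationships.model_rebuild()  # resolve the forward reference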
diff -pruN 0.26.4-3/tests/data/jsonschema/invalid_enum_name.json 0.45.0-1/tests/data/jsonschema/invalid_enum_name.json
--- 0.26.4-3/tests/data/jsonschema/invalid_enum_name.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/invalid_enum_name.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "InvalidEnum",
+  "type": "string",
+  "enum": [
+    "1 value",
+    " space",
+    "*- special",
+    "schema",
+    "MRO",
+    "mro"
+  ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/invalid_import_name/array-commons.schema.json 0.45.0-1/tests/data/jsonschema/invalid_import_name/array-commons.schema.json
--- 0.26.4-3/tests/data/jsonschema/invalid_import_name/array-commons.schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/invalid_import_name/array-commons.schema.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+{
+  "$schema": "https://json-schema.org/draft/2020-12/schema",
+  "$id": "https://example.com/array-commons.schema.json",
+  "title": "Commons",
+  "description": "Commons objects",
+  "$defs": {
+    "defaultArray": {
+      "type": "array",
+      "minLength": 1,
+      "maxLength": 100
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/invalid_import_name/products.schema.json 0.45.0-1/tests/data/jsonschema/invalid_import_name/products.schema.json
--- 0.26.4-3/tests/data/jsonschema/invalid_import_name/products.schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/invalid_import_name/products.schema.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+{
+  "$schema": "https://json-schema.org/draft/2020-12/schema",
+  "$id": "https://example.com/products.schema.json",
+  "title": "Products",
+  "description": "The products in the catalog",
+  "$ref": "array-commons.schema.json#/$defs/defaultArray"
+}
\ No newline at end of file
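The point of invalid_import_name is the hyphen: "array-commons.schema.json" cannot back a module literally named array-commons, so modular output has to sanitize the stem before emitting imports. An illustrative sanitizer, not the library's actual implementation:

import keyword
import re

def safe_module_name(stem: str) -> str:
    """Illustrative: make a schema file stem importable as a Python module."""
    name = re.sub(r"\W", "_", stem)               # hyphens, dots, etc. -> "_"
    if name[0].isdigit() or keyword.iskeyword(name):
        name = f"{name}_" if keyword.iskeyword(name) else f"_{name}"
    return name

assert safe_module_name("array-commons.schema") == "array_commons_schema"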
diff -pruN 0.26.4-3/tests/data/jsonschema/invalid_model_name.json 0.45.0-1/tests/data/jsonschema/invalid_model_name.json
--- 0.26.4-3/tests/data/jsonschema/invalid_model_name.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/invalid_model_name.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,26 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "1 xyz",
+  "type": "object",
+  "properties": {
+    "firstName": {
+      "type": "string",
+      "description": "The person's first name."
+    },
+    "lastName": {
+      "type": "string",
+      "description": "The person's last name."
+    },
+    "age": {
+      "description": "Age in years which must be equal to or greater than zero.",
+      "type": "integer",
+      "minimum": 0
+    },
+    "friends": {
+      "type": "array"
+    },
+    "comment": {
+      "type": "null"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/items_boolean.json 0.45.0-1/tests/data/jsonschema/items_boolean.json
--- 0.26.4-3/tests/data/jsonschema/items_boolean.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/items_boolean.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,9 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "properties": {
+    "example": {
+      "items": true
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/json_pointer.json 0.45.0-1/tests/data/jsonschema/json_pointer.json
--- 0.26.4-3/tests/data/jsonschema/json_pointer.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/json_pointer.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,72 @@
+{
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "title": "Person",
+    "type": "object",
+    "properties": {
+        "name": {
+            "title": "name",
+            "type": "string"
+        },
+        "pet": {
+            "title": "pet",
+            "type": "object",
+            "oneOf": [
+                {
+                    "$ref": "#/definitions/Pets/Cat"
+                },
+                {
+                    "$ref": "#/definitions/Pets/Dog"
+                }
+            ]
+        }
+    },
+    "definitions": {
+        "Pets": {
+            "Cat": {
+                "title": "Cat",
+                "type": "object",
+                "required": [
+                    "pet_type",
+                    "hunts",
+                    "age"
+                ],
+                "properties": {
+                    "pet_type": {
+                        "enum": [
+                            "Cat"
+                        ]
+                    },
+                    "hunts": {
+                        "type": "boolean"
+                    },
+                    "age": {
+                        "type": "string"
+                    }
+                }
+            },
+            "Dog": {
+                "title": "Dog",
+                "type": "object",
+                "required": [
+                    "pet_type",
+                    "bark",
+                    "breed"
+                ],
+                "properties": {
+                    "pet_type": {
+                        "enum": [
+                            "Dog"
+                        ]
+                    },
+                    "bark": {
+                        "type": "boolean"
+                    },
+                    "breed": {
+                        "type": "string"
+                    }
+                }
+            }
+        }
+    },
+    "additionalProperties": false
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/json_pointer_array.json 0.45.0-1/tests/data/jsonschema/json_pointer_array.json
--- 0.26.4-3/tests/data/jsonschema/json_pointer_array.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/json_pointer_array.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,62 @@
+
+ {
+  "$schema": "https://json-schema.org/draft/2019-09/schema",
+  "oneOf": [
+    {
+      "definitions": {
+        "email": {
+          "properties": {
+            "email": {
+              "format": "email",
+              "type": "string"
+            }
+          },
+          "required": [
+            "email"
+          ],
+          "type": "object"
+        }
+      },
+      "properties": {
+        "emails": {
+          "items": {
+            "$ref": "#/oneOf/0/definitions/email"
+          },
+          "type": "array"
+        }
+      },
+      "required": [
+        "emails"
+      ],
+      "type": "object"
+    },
+    {
+      "definitions": {
+        "error": {
+          "properties": {
+            "code": {
+              "type": "string"
+            }
+          },
+          "required": [
+            "code"
+          ],
+          "type": "object"
+        }
+      },
+      "properties": {
+        "errors": {
+          "items": {
+            "$ref": "#/oneOf/1/definitions/error"
+          },
+          "type": "array"
+        }
+      },
+      "required": [
+        "errors"
+      ],
+      "type": "object"
+    }
+  ],
+  "type": "object"
+}
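json_pointer_array.json is unusual in that its $refs index into the oneOf array itself ("#/oneOf/0/definitions/email"), so pointer resolution must treat numeric tokens as list indices. A minimal RFC 6901-style resolver sketch:

def resolve_pointer(document, pointer: str):
    """Resolve a pointer such as '#/oneOf/0/definitions/email'."""
    node = document
    for token in pointer.lstrip("#").strip("/").split("/"):
        token = token.replace("~1", "/").replace("~0", "~")  # RFC 6901 escapes
        node = node[int(token)] if isinstance(node, list) else node[token]
    return node

schema = {"oneOf": [{"definitions": {"email": {"type": "object"}}}]}
assert resolve_pointer(schema, "#/oneOf/0/definitions/email") == {"type": "object"}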
diff -pruN 0.26.4-3/tests/data/jsonschema/keep_model_order_field_references.json 0.45.0-1/tests/data/jsonschema/keep_model_order_field_references.json
--- 0.26.4-3/tests/data/jsonschema/keep_model_order_field_references.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/keep_model_order_field_references.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "DescriptionType",
+  "type": "object",
+  "properties": {
+    "metadata": {
+      "type": "array",
+      "items": {
+        "$ref": "#/definitions/Metadata"
+      }
+    }
+  },
+  "definitions": {
+    "Metadata": {
+      "title": "Metadata",
+      "type": "object",
+      "properties": {
+        "title": {
+          "type": "string"
+        }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/long_description.json 0.45.0-1/tests/data/jsonschema/long_description.json
--- 0.26.4-3/tests/data/jsonschema/long_description.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/long_description.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "LongDescription",
+  "type": "object",
+  "properties": {
+    "summary": {
+      "type": "string",
+      "description": "summary for object"
+    },
+    "description": {
+      "type": "string",
+      "description": "datamodel-code-generator. This code generator creates pydantic model from an openapi file and others."
+    },
+    "multi_line": {
+      "description": "datamodel-code-generator\nThis code generator creates pydantic model from an openapi file and others.\n\n\nSupported source types\nOpenAPI 3 (YAML/JSON, OpenAPI Data Type)\nJSON Schema (JSON Schema Core/JSON Schema Validation)\nJSON/YAML/CSV Data (it will be converted to JSON Schema)\nPython dictionary (it will be converted to JSON Schema)",
+      "type": "string"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/many_case_enum.json 0.45.0-1/tests/data/jsonschema/many_case_enum.json
--- 0.26.4-3/tests/data/jsonschema/many_case_enum.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/many_case_enum.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "string",
+  "enum": [
+    "snake_case",
+    "CAP_CASE",
+    "CamelCase",
+    "UPPERCASE"
+  ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/missing_anchor.json 0.45.0-1/tests/data/jsonschema/missing_anchor.json
--- 0.26.4-3/tests/data/jsonschema/missing_anchor.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/missing_anchor.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,5 @@
+{
+  "title": "BrokenAnchor",
+  "type": "object",
+  "$ref": "#address"
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/modular_default_enum_member/bar.json 0.45.0-1/tests/data/jsonschema/modular_default_enum_member/bar.json
--- 0.26.4-3/tests/data/jsonschema/modular_default_enum_member/bar.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/modular_default_enum_member/bar.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "$id": "bar.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Bar",
+  "type": "object",
+  "definitions": {
+    "logLevels": {
+      "description": "Supported logging levels",
+      "type": "string",
+      "enum": ["DEBUG", "INFO", "WARN", "ERROR"],
+      "default": "INFO"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/modular_default_enum_member/foo.json 0.45.0-1/tests/data/jsonschema/modular_default_enum_member/foo.json
--- 0.26.4-3/tests/data/jsonschema/modular_default_enum_member/foo.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/modular_default_enum_member/foo.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+{
+  "$id": "foo.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Foo",
+  "type": "object",
+  "properties": {
+    "loggerLevel": {
+      "$ref": "bar.json#/definitions/logLevels"
+    },
+    "AnotherLoggerLevel": {
+      "$ref": "nested_bar/bar.json#/definitions/logLevels"
+    },
+    "OtherLoggerLevels": {
+      "items": {
+        "$ref": "nested_bar/bar.json#/definitions/logLevels"
+      },
+      "default": ["INFO", "ERROR", "INVALID"]
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/modular_default_enum_member/nested_bar/bar.json 0.45.0-1/tests/data/jsonschema/modular_default_enum_member/nested_bar/bar.json
--- 0.26.4-3/tests/data/jsonschema/modular_default_enum_member/nested_bar/bar.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/modular_default_enum_member/nested_bar/bar.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "$id": "nested_bar/bar.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "NestedBar",
+  "type": "object",
+  "definitions": {
+    "logLevels": {
+      "description": "Supported logging levels Nested Model",
+      "type": "string",
+      "enum": ["DEBUG", "INFO", "ERROR"],
+      "default": "ERROR"
+    }
+  }
+}
\ No newline at end of file
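The modular_default_enum_member fixtures pair enum definitions in two modules with string defaults at the use sites; with --set-default-enum-member those defaults should be rewritten to enum members, and the "INVALID" entry in the list default has no member to match, which is exactly the edge the test pins down. A plausible excerpt for foo.json (sketch):

from enum import Enum
from typing import Optional
from pydantic import BaseModel

class LogLevels(Enum):               # from bar.json#/definitions/logLevels
    DEBUG = "DEBUG"
    INFO = "INFO"
    WARN = "WARN"
    ERROR = "ERROR"

class Foo(BaseModel):
    # "INFO" resolved to an enum member instead of a bare string
    loggerLevel: Optional[LogLevels] = LogLevels.INFO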
diff -pruN 0.26.4-3/tests/data/jsonschema/module_split_single/input.json 0.45.0-1/tests/data/jsonschema/module_split_single/input.json
--- 0.26.4-3/tests/data/jsonschema/module_split_single/input.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/module_split_single/input.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "User": {
+      "type": "object",
+      "properties": {
+        "id": {"type": "integer"},
+        "name": {"type": "string"}
+      }
+    },
+    "Order": {
+      "type": "object",
+      "properties": {
+        "id": {"type": "integer"},
+        "user": {"$ref": "#/definitions/User"}
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/msgspec_falsy_defaults.json 0.45.0-1/tests/data/jsonschema/msgspec_falsy_defaults.json
--- 0.26.4-3/tests/data/jsonschema/msgspec_falsy_defaults.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/msgspec_falsy_defaults.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,18 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "properties": {
+    "zero_int": {
+      "type": "integer",
+      "default": 0
+    },
+    "empty_string": {
+      "type": "string",
+      "default": ""
+    },
+    "false_bool": {
+      "type": "boolean",
+      "default": false
+    }
+  }
+}
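msgspec_falsy_defaults.json guards against falsy defaults (0, "", false) being confused with "no default". The expected shape of the msgspec output (sketch):

import msgspec

class Model(msgspec.Struct):
    # Falsy values are still real defaults and must be emitted as such.
    zero_int: int = 0
    empty_string: str = ""
    false_bool: bool = False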
diff -pruN 0.26.4-3/tests/data/jsonschema/msgspec_null_field.json 0.45.0-1/tests/data/jsonschema/msgspec_null_field.json
--- 0.26.4-3/tests/data/jsonschema/msgspec_null_field.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/msgspec_null_field.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "properties": {
+    "required_null": {
+      "type": "null"
+    },
+    "optional_null": {
+      "type": "null"
+    }
+  },
+  "required": ["required_null"]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/multiple_files/file_a.json 0.45.0-1/tests/data/jsonschema/multiple_files/file_a.json
--- 0.26.4-3/tests/data/jsonschema/multiple_files/file_a.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/multiple_files/file_a.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+{
+  "$schema": "http://json-schema.org/schema#",
+  "type": "object",
+  "title": "ModelA",
+  "properties": {
+    "firstName": {
+      "type": "string"
+    },
+    "modelB": {
+      "$ref": "file_b.json#"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/multiple_files/file_b.json 0.45.0-1/tests/data/jsonschema/multiple_files/file_b.json
--- 0.26.4-3/tests/data/jsonschema/multiple_files/file_b.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/multiple_files/file_b.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+{
+  "$schema": "http://json-schema.org/schema#",
+  "type": "object",
+  "title": "ModelB",
+  "properties": {
+    "metadata": {
+      "type": "string"
+    }
+  },
+  "required": [
+    "metadata"
+  ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/multiple_files/file_c.json 0.45.0-1/tests/data/jsonschema/multiple_files/file_c.json
--- 0.26.4-3/tests/data/jsonschema/multiple_files/file_c.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/multiple_files/file_c.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+{
+  "$schema": "http://json-schema.org/schema#",
+  "type": "object",
+  "title": "ModelC",
+  "properties": {
+    "firstName": {
+      "type": "string"
+    },
+    "modelB": {
+      "$ref": "file_b.json#"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/multiple_files/file_d.json 0.45.0-1/tests/data/jsonschema/multiple_files/file_d.json
--- 0.26.4-3/tests/data/jsonschema/multiple_files/file_d.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/multiple_files/file_d.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+{
+  "$schema": "http://json-schema.org/schema#",
+  "type": "object",
+  "title": "ModelD",
+  "properties": {
+    "firstName": {
+      "type": "string"
+    },
+    "modelA": {
+      "$ref": "file_a.json#"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/multiple_files_json_pointer/file_a.json 0.45.0-1/tests/data/jsonschema/multiple_files_json_pointer/file_a.json
--- 0.26.4-3/tests/data/jsonschema/multiple_files_json_pointer/file_a.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/multiple_files_json_pointer/file_a.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+{
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "title": "PersonA",
+    "type": "object",
+    "properties": {
+        "name": {
+            "title": "name",
+            "type": "string"
+        },
+        "pet": {
+            "title": "pet",
+            "type": "object",
+            "oneOf": [
+                {
+                    "$ref": "file_b.json#/definitions/Pets/Cat"
+                },
+                {
+                    "$ref": "file_b.json#definitions/Pets/Dog"
+                }
+            ]
+        }
+    },
+    "additionalProperties": false
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/multiple_files_json_pointer/file_b.json 0.45.0-1/tests/data/jsonschema/multiple_files_json_pointer/file_b.json
--- 0.26.4-3/tests/data/jsonschema/multiple_files_json_pointer/file_b.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/multiple_files_json_pointer/file_b.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,52 @@
+{
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "definitions": {
+        "Pets": {
+            "Cat": {
+                "title": "Cat",
+                "type": "object",
+                "required": [
+                    "pet_type",
+                    "hunts",
+                    "age"
+                ],
+                "properties": {
+                    "pet_type": {
+                        "enum": [
+                            "Cat"
+                        ]
+                    },
+                    "hunts": {
+                        "type": "boolean"
+                    },
+                    "age": {
+                        "type": "string"
+                    }
+                }
+            },
+            "Dog": {
+                "title": "Dog",
+                "type": "object",
+                "required": [
+                    "pet_type",
+                    "bark",
+                    "breed"
+                ],
+                "properties": {
+                    "pet_type": {
+                        "enum": [
+                            "Dog"
+                        ]
+                    },
+                    "bark": {
+                        "type": "boolean"
+                    },
+                    "breed": {
+                        "type": "string"
+                    }
+                }
+            }
+        }
+    },
+    "additionalProperties": false
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/multiple_files_json_pointer/file_c.json 0.45.0-1/tests/data/jsonschema/multiple_files_json_pointer/file_c.json
--- 0.26.4-3/tests/data/jsonschema/multiple_files_json_pointer/file_c.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/multiple_files_json_pointer/file_c.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+{
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "title": "PersonC",
+    "type": "object",
+    "properties": {
+        "name": {
+            "title": "name",
+            "type": "string"
+        },
+        "pet": {
+            "title": "pet",
+            "type": "object",
+            "oneOf": [
+                {
+                    "$ref": "file_b.json#/definitions/Pets/Cat"
+                },
+                {
+                    "$ref": "file_b.json#/definitions/Pets/Dog"
+                }
+            ]
+        }
+    },
+    "additionalProperties": false
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/multiple_files_self_ref/base_test.json 0.45.0-1/tests/data/jsonschema/multiple_files_self_ref/base_test.json
--- 0.26.4-3/tests/data/jsonschema/multiple_files_self_ref/base_test.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/multiple_files_self_ref/base_test.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "$id": "base_test.json",
+  "description": "test",
+  "type": "object",
+  "definitions": {
+    "first": {
+      "$ref": "#/definitions/second"
+    },
+    "second": {
+      "type": "string"
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/multiple_files_self_ref/test.json 0.45.0-1/tests/data/jsonschema/multiple_files_self_ref/test.json
--- 0.26.4-3/tests/data/jsonschema/multiple_files_self_ref/test.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/multiple_files_self_ref/test.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "$id": "test.json",
+  "description": "test",
+  "type": "object",
+  "required": [
+    "test_id",
+    "test_ip",
+    "result"
+  ],
+  "properties": {
+    "test_id": {
+      "type": "string",
+      "description": "test ID"
+    },
+    "test_ip": {
+      "$ref": "base_test.json#/definitions/first"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/multiple_types_with_object.json 0.45.0-1/tests/data/jsonschema/multiple_types_with_object.json
--- 0.26.4-3/tests/data/jsonschema/multiple_types_with_object.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/multiple_types_with_object.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,35 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "TopLevelMultiType": {
+      "type": ["boolean", "object"],
+      "properties": {
+        "enabled": {
+          "type": "boolean"
+        }
+      }
+    }
+  },
+  "type": "object",
+  "properties": {
+    "external": {
+      "type": ["boolean", "object"],
+      "properties": {
+        "name": {
+          "type": "string"
+        }
+      }
+    },
+    "config": {
+      "type": ["null", "string", "object"],
+      "properties": {
+        "value": {
+          "type": "integer"
+        }
+      }
+    },
+    "top_level_ref": {
+      "$ref": "#/definitions/TopLevelMultiType"
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/nested_all_of.json 0.45.0-1/tests/data/jsonschema/nested_all_of.json
--- 0.26.4-3/tests/data/jsonschema/nested_all_of.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/nested_all_of.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,40 @@
+{
+  "title": "Model",
+  "allOf": [
+    {
+      "type": "object",
+      "properties": {
+        "first": {
+          "type": "string"
+        }
+      },
+      "required": [
+        "first"
+      ]
+    },
+    {
+      "allOf": [
+        {
+          "properties": {
+            "second": {
+              "type": "string"
+            }
+          },
+          "required": [
+            "second"
+          ]
+        },
+        {
+          "properties": {
+            "third": {
+              "type": "string"
+            }
+          },
+          "required": [
+            "third"
+          ]
+        }
+      ]
+    }
+  ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/nested_array.json 0.45.0-1/tests/data/jsonschema/nested_array.json
--- 0.26.4-3/tests/data/jsonschema/nested_array.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/nested_array.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,35 @@
+{
+  "type": "object",
+  "properties": {
+    "bounding_box": {
+      "type": "object",
+      "properties": {
+        "type": {
+          "type": "string"
+        },
+        "coordinates": {
+          "type": "array",
+          "items": {
+            "type": "array",
+            "items": {
+              "type": "array",
+              "items": [{
+                "type": "number"
+              },
+              {
+                "type": "string"
+              }]
+            }
+          }
+        }
+      },
+      "required": [
+        "coordinates",
+        "type"
+      ]
+    },
+    "attributes": {
+      "type": "object"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/nested_array.json.snapshot 0.45.0-1/tests/data/jsonschema/nested_array.json.snapshot
--- 0.26.4-3/tests/data/jsonschema/nested_array.json.snapshot	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/nested_array.json.snapshot	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,8 @@
+class BoundingBox(BaseModel):
+    type: str
+    coordinates: List[List[List[Union[float, str]]]]
+
+
+class Model(BaseModel):
+    bounding_box: Optional[BoundingBox] = None
+    attributes: Optional[Dict[str, Any]] = None
\ No newline at end of file
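The snapshot above is a fragment; making it importable only needs the names it already uses. Note how the positional "items" pair [number, string] collapsed into a plain Union in this output mode. A self-contained version (sketch):

from typing import Any, Dict, List, Optional, Union
from pydantic import BaseModel

class BoundingBox(BaseModel):
    type: str
    coordinates: List[List[List[Union[float, str]]]]

class Model(BaseModel):
    bounding_box: Optional[BoundingBox] = None
    attributes: Optional[Dict[str, Any]] = None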
diff -pruN 0.26.4-3/tests/data/jsonschema/nested_json_pointer.json 0.45.0-1/tests/data/jsonschema/nested_json_pointer.json
--- 0.26.4-3/tests/data/jsonschema/nested_json_pointer.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/nested_json_pointer.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,142 @@
+{
+    "$schema": "http://json-schema.org/draft-04/schema#",
+    "title": "Person",
+    "type": "object",
+    "properties": {
+        "name": {
+            "title": "name",
+            "type": "string"
+        },
+        "pet": {
+            "title": "pet",
+            "type": "object",
+            "oneOf": [
+                {
+                    "$ref": "#/definitions/Pets/Cat"
+                },
+                {
+                    "$ref": "#/definitions/Pets/Dog"
+                }
+            ]
+        }
+    },
+    "definitions": {
+        "CatBreed":{
+            "C1":
+            {
+                "title":"C1",
+                "type": "object",
+                "properties":
+                {
+                    "hunts": {
+                        "type": "boolean"
+                    },
+                    "age": {
+                        "type": "string"
+                    }
+                }
+            },
+            "C2":
+            {
+                "title":"C2",
+                "type": "object",
+                "properties":
+                {
+                    "hunts": {
+                        "type": "boolean"
+                    },
+                    "age": {
+                        "type": "string"
+                    }
+                }
+            }
+        },
+        "DogBreed":{
+            "D1":
+            {
+                "title":"D1",
+                "type": "object",
+                "properties":
+                {
+                    "bark": {
+                        "type": "boolean"
+                    },
+                    "age": {
+                        "type": "string"
+                    }
+                }
+            },
+            "D2":
+            {
+                "title":"D2",
+                "type": "object",
+                "properties":
+                {
+                    "hunts": {
+                        "type": "boolean"
+                    },
+                    "age": {
+                        "type": "string"
+                    }
+                }
+            }
+        },
+        "Pets": {
+            "Cat": {
+                "title": "Cat",
+                "type": "object",
+                "required": [
+                    "pet_type",
+                    "age"
+                ],
+                "properties": {
+                    "pet_type": {
+                        "enum": [
+                            "Cat"
+                        ]
+                    },
+                    "breed": {
+                        "title": "breed",
+                        "type": "object",
+                        "oneOf": [
+                            {
+                                "$ref": "#/definitions/CatBreed/C1"
+                            },
+                            {
+                                "$ref": "#/definitions/CatBreed/C2"
+                            }
+                        ]
+                    }
+                }
+            },
+            "Dog": {
+                "title": "Dog",
+                "type": "object",
+                "required": [
+                    "pet_type",
+                    "breed"
+                ],
+                "properties": {
+                    "pet_type": {
+                        "enum": [
+                            "Dog"
+                        ]
+                    },
+                    "breed": {
+                        "title": "breed",
+                        "type": "string",
+                        "oneOf": [
+                            {
+                                "$ref": "#/definitions/DogBreed/D1"
+                            },
+                            {
+                                "$ref": "#/definitions/DogBreed/D2"
+                            }
+                        ]
+                    }
+                }
+            }
+        }
+    },
+    "additionalProperties": false
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/nested_person.json 0.45.0-1/tests/data/jsonschema/nested_person.json
--- 0.26.4-3/tests/data/jsonschema/nested_person.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/nested_person.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,42 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "NestedPerson",
+  "type": "object",
+  "properties": {
+    "nested_deep_childJson": {
+      "$ref": "#/definitions/nested.deep.Json"
+    },
+    "nested_deep_childAnother": {
+      "$ref": "#/definitions/nested.deep.Another"
+    },
+    "empty_parent_nested_deep_childJson": {
+      "$ref": "#/definitions/empty_parent.nested.deep.Json"
+    }
+  },
+  "definitions": {
+    "nested.deep.Json": {
+      "type": "object",
+      "properties": {
+        "firstName": {
+          "type": "string"
+        }
+      }
+    },
+    "nested.deep.Another": {
+      "type": "object",
+      "properties": {
+        "firstName": {
+          "type": "string"
+        }
+      }
+    },
+    "empty_parent.nested.deep.Json": {
+      "type": "object",
+      "properties": {
+        "firstName": {
+          "type": "string"
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/nested_skip.json 0.45.0-1/tests/data/jsonschema/nested_skip.json
--- 0.26.4-3/tests/data/jsonschema/nested_skip.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/nested_skip.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "NestedSkip",
+  "type": "object",
+  "definitions": {
+    "a.b.c.d.e": {
+      "type": "object",
+      "properties": {
+        "example1": {
+          "type": "string"
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/no_alias.json 0.45.0-1/tests/data/jsonschema/no_alias.json
--- 0.26.4-3/tests/data/jsonschema/no_alias.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/no_alias.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Person",
+  "type": "object",
+  "properties": {
+    "first-name": {
+      "type": "string"
+    },
+    "last-name": {
+      "type": "string"
+    },
+    "email_address": {
+      "type": "string"
+    }
+  },
+  "required": ["first-name", "last-name"]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/not_required_nullable.json 0.45.0-1/tests/data/jsonschema/not_required_nullable.json
--- 0.26.4-3/tests/data/jsonschema/not_required_nullable.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/not_required_nullable.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Person",
+  "type": "object",
+  "properties": {
+    "name": {
+      "type": "string"
+    },
+    "null_name": {
+      "type": ["string", "null"]
+    },
+    "age": {
+      "type": "integer"
+    },
+    "null_age": {
+      "type": ["integer", "null"]
+    }
+  },
+  "required": ["name", "null_age"]
+}
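not_required_nullable.json spans the 2x2 matrix of required x nullable. With --strict-nullable the distinction should survive into the models roughly as follows (sketch):

from typing import Optional
from pydantic import BaseModel

class Person(BaseModel):
    name: str                          # required, not nullable
    null_name: Optional[str] = None    # optional and nullable
    age: Optional[int] = None          # optional, not nullable (collapses
                                       # to Optional without --strict-nullable)
    null_age: Optional[int]            # required but nullable: no default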
diff -pruN 0.26.4-3/tests/data/jsonschema/null.json 0.45.0-1/tests/data/jsonschema/null.json
--- 0.26.4-3/tests/data/jsonschema/null.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/null.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema",
+  "type": "object",
+  "properties": {
+    "null": {
+      "type": "null"
+    },
+    "nullableString": {
+      "type": ["null", "string"]
+    },
+    "nullableNumber":{
+      "type": ["null", "number", "integer"]
+    },
+    "any": {
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/null_and_array.json 0.45.0-1/tests/data/jsonschema/null_and_array.json
--- 0.26.4-3/tests/data/jsonschema/null_and_array.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/null_and_array.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,26 @@
+{
+    "$schema": "http://json-schema.org/schema#",
+    "type": "object",
+    "properties": {
+        "my_obj": {
+            "type": "array",
+            "items": {
+                "type": "object",
+                "properties": {
+                    "items": {
+                        "type": [
+                            "array",
+                            "null"
+                        ]
+                    }
+                },
+                "required": [
+                    "items"
+                ]
+            }
+        }
+    },
+    "required": [
+        "my_obj"
+    ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/nullable_any_of.json 0.45.0-1/tests/data/jsonschema/nullable_any_of.json
--- 0.26.4-3/tests/data/jsonschema/nullable_any_of.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/nullable_any_of.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,41 @@
+{
+  "type": "object",
+  "additionalProperties": false,
+  "$schema": "https://json-schema.org/draft/2020-12/schema",
+  "$id": "id1",
+  "title": "Validating Schema ID1",
+  "properties": {
+    "in": {
+      "type": "object",
+      "additionalProperties": false,
+      "properties": {
+        "input_dataset_path": {
+          "type": "string",
+          "minLength": 1,
+          "title": "Path to the input dataset",
+          "description": "d1"
+        },
+        "config": {
+          "anyOf": [
+            {
+              "type": "string",
+              "minLength": 1,
+              "title": "t2",
+              "description": "d2"
+            },
+            {
+              "type": [
+                "null"
+              ],
+              "title": "t3",
+              "description": "d3"
+            }
+          ]
+        }
+      }
+    },
+    "n1": {
+      "type": "integer"
+    }
+  }
+}
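
nullable_any_of.json exercises an anyOf whose second branch is the bare "null" type, which collapses to an Optional field. A sketch of the expected shape; the class names below are guesses derived from the titles, and the fixture's additionalProperties: false would additionally forbid extra keys:

from typing import Optional

from pydantic import BaseModel, Field


class In(BaseModel):
    input_dataset_path: Optional[str] = Field(None, min_length=1)
    # anyOf of a string branch and a bare "null" branch collapses to Optional[str]
    config: Optional[str] = None


class ValidatingSchemaId1(BaseModel):
    in_: Optional[In] = Field(None, alias='in')  # 'in' is a Python keyword
    n1: Optional[int] = None
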
diff -pruN 0.26.4-3/tests/data/jsonschema/nullable_object.json 0.45.0-1/tests/data/jsonschema/nullable_object.json
--- 0.26.4-3/tests/data/jsonschema/nullable_object.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/nullable_object.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,41 @@
+{
+  "$schema": "http://json-schema.org/draft-04/schema#",
+  "id": "config_schema_v3.7.json",
+  "type": "object",
+  "required": [
+    "networks"
+  ],
+  "properties": {
+    "networks": {
+      "id": "#/properties/networks",
+      "type": "object",
+      "patternProperties": {
+        "^[a-zA-Z0-9._-]+$": {
+          "$ref": "#/definitions/network"
+        }
+      }
+    }
+  },
+  "patternProperties": {
+    "^x-": {}
+  },
+  "additionalProperties": false,
+  "definitions": {
+    "network": {
+      "id": "#/definitions/network",
+      "type": [
+        "object",
+        "null"
+      ],
+      "properties": {
+        "name": {
+          "type": "string"
+        }
+      },
+      "patternProperties": {
+        "^x-": {}
+      },
+      "additionalProperties": false
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/object_has_one_of.json 0.45.0-1/tests/data/jsonschema/object_has_one_of.json
--- 0.26.4-3/tests/data/jsonschema/object_has_one_of.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/object_has_one_of.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,82 @@
+{
+    "$schema": "https://json-schema.org/draft/2019-09/schema",
+    "type": "object",
+    "title": "v2_test",
+    "additionalProperties": true,
+    "oneOf": [
+        {
+            "type": "object",
+            "properties": {
+                "field_1": {
+                    "enum": [
+                        "response_1"
+                    ]
+                }
+            },
+            "additionalProperties": true,
+            "oneOf": [
+                {
+                    "type": "object",
+                    "properties": {
+                        "field_2": {
+                            "enum": [
+                                "response_a"
+                            ]
+                        }
+                    },
+                    "additionalProperties": true,
+                    "required": [
+                        "field_2"
+                    ]
+                }
+            ],
+            "required": [
+                "field_1"
+            ]
+        },
+        {
+            "type": "object",
+            "properties": {
+                "field_1": {
+                    "enum": [
+                        "response_2"
+                    ]
+                }
+            },
+            "additionalProperties": true,
+            "oneOf": [
+                {
+                    "type": "object",
+                    "properties": {
+                        "field_2": {
+                            "enum": [
+                                "response_b"
+                            ]
+                        }
+                    },
+                    "additionalProperties": true,
+                    "required": [
+                        "field_2"
+                    ]
+                },
+                {
+                    "type": "object",
+                    "properties": {
+                        "field_2": {
+                            "enum": [
+                                "response_c"
+                            ]
+                        }
+                    },
+                    "additionalProperties": true,
+                    "required": [
+                        "field_2"
+                    ]
+                }
+            ],
+            "required": [
+                "field_1"
+            ]
+        }
+    ]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/one_of_with_sub_schema_array_item.json 0.45.0-1/tests/data/jsonschema/one_of_with_sub_schema_array_item.json
--- 0.26.4-3/tests/data/jsonschema/one_of_with_sub_schema_array_item.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/one_of_with_sub_schema_array_item.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+{
+    "$schema": "http://json-schema.org/draft-04/schema#",
+  "title": "SpatialPlan",
+  "type": "object",
+  "properties": {
+    "officialDocument": {
+      "title": "officialDocument",
+      "description": "Link to the official documents that relate to the spatial plan.",
+      "oneOf": [
+        {
+          "type": "string"
+        },
+        {
+          "type": "array",
+          "minItems": 1,
+          "items": {
+            "type": "string",
+            "format": "uri"
+          },
+          "uniqueItems": true
+        }
+      ]
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/oneof.json 0.45.0-1/tests/data/jsonschema/oneof.json
--- 0.26.4-3/tests/data/jsonschema/oneof.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/oneof.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+{
+    "properties": {
+        "item": {
+            "properties": {
+                "timeout": {
+                    "oneOf": [
+                        {
+                            "type": "string"
+                        },
+                        {
+                            "type": "integer"
+                        }
+                    ]
+                }
+            },
+            "type": "object"
+        }
+    }
+}
+
diff -pruN 0.26.4-3/tests/data/jsonschema/oneof.json.snapshot 0.45.0-1/tests/data/jsonschema/oneof.json.snapshot
--- 0.26.4-3/tests/data/jsonschema/oneof.json.snapshot	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/oneof.json.snapshot	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,6 @@
+class Item(BaseModel):
+    timeout: Optional[Union[str, int]] = None
+
+
+class OnOfObject(BaseModel):
+    item: Optional[Item] = None
\ No newline at end of file
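
The snapshot above pairs with oneof.json; the root name OnOfObject is presumably set by the test invocation rather than derived from the schema, which has no title. A sketch of regenerating it with the library's documented generate() API; the class_name keyword is assumed here to mirror the --class-name CLI option:

from pathlib import Path

from datamodel_code_generator import InputFileType, generate

generate(
    Path('tests/data/jsonschema/oneof.json'),
    input_file_type=InputFileType.JsonSchema,
    output=Path('model.py'),
    class_name='OnOfObject',  # assumption: matches the snapshot's root name
)
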
diff -pruN 0.26.4-3/tests/data/jsonschema/oneof_const_enum.yaml 0.45.0-1/tests/data/jsonschema/oneof_const_enum.yaml
--- 0.26.4-3/tests/data/jsonschema/oneof_const_enum.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/oneof_const_enum.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+$schema: http://json-schema.org/draft-07/schema#
+title: NodeJS mode
+type: string
+oneOf:
+  - title: npm
+    const: npm
+  - title: yarn
+    const: yarn
+  - title: npm ci
+    const: npm_ci
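
A oneOf made solely of const values behaves like an enum, which is what the oneof_const_enum fixtures exercise. Roughly the shape one would expect for the YAML above; the class and member names are illustrative, since titles in the schema may drive naming:

from enum import Enum


class NodeJSMode(Enum):
    npm = 'npm'
    yarn = 'yarn'
    npm_ci = 'npm_ci'
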
diff -pruN 0.26.4-3/tests/data/jsonschema/oneof_const_enum_bool.json 0.45.0-1/tests/data/jsonschema/oneof_const_enum_bool.json
--- 0.26.4-3/tests/data/jsonschema/oneof_const_enum_bool.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/oneof_const_enum_bool.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,8 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Boolean Flag",
+  "oneOf": [
+    {"const": true},
+    {"const": false}
+  ]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/oneof_const_enum_float.json 0.45.0-1/tests/data/jsonschema/oneof_const_enum_float.json
--- 0.26.4-3/tests/data/jsonschema/oneof_const_enum_float.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/oneof_const_enum_float.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,9 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Ratio",
+  "oneOf": [
+    {"const": 0.5},
+    {"const": 1.0},
+    {"const": 1.5}
+  ]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/oneof_const_enum_infer_type.yaml 0.45.0-1/tests/data/jsonschema/oneof_const_enum_infer_type.yaml
--- 0.26.4-3/tests/data/jsonschema/oneof_const_enum_infer_type.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/oneof_const_enum_infer_type.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,5 @@
+$schema: http://json-schema.org/draft-07/schema#
+title: Inferred Type
+oneOf:
+  - const: "value1"
+  - const: "value2"
diff -pruN 0.26.4-3/tests/data/jsonschema/oneof_const_enum_int.yaml 0.45.0-1/tests/data/jsonschema/oneof_const_enum_int.yaml
--- 0.26.4-3/tests/data/jsonschema/oneof_const_enum_int.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/oneof_const_enum_int.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+$schema: http://json-schema.org/draft-07/schema#
+title: Status Code
+type: integer
+oneOf:
+  - title: OK
+    const: 200
+  - title: Not Found
+    const: 404
+  - title: Server Error
+    const: 500
diff -pruN 0.26.4-3/tests/data/jsonschema/oneof_const_enum_nested.yaml 0.45.0-1/tests/data/jsonschema/oneof_const_enum_nested.yaml
--- 0.26.4-3/tests/data/jsonschema/oneof_const_enum_nested.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/oneof_const_enum_nested.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+$schema: http://json-schema.org/draft-07/schema#
+type: object
+title: Config
+properties:
+  mode:
+    title: Mode
+    type: string
+    oneOf:
+      - title: fast
+        const: fast
+      - title: slow
+        const: slow
+  modes:
+    type: array
+    items:
+      type: string
+      oneOf:
+        - const: a
+        - const: b
diff -pruN 0.26.4-3/tests/data/jsonschema/oneof_const_enum_nullable.yaml 0.45.0-1/tests/data/jsonschema/oneof_const_enum_nullable.yaml
--- 0.26.4-3/tests/data/jsonschema/oneof_const_enum_nullable.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/oneof_const_enum_nullable.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,9 @@
+$schema: http://json-schema.org/draft-07/schema#
+title: Optional Mode
+type: string
+oneOf:
+  - title: enabled
+    const: enabled
+  - title: disabled
+    const: disabled
+  - type: "null"
diff -pruN 0.26.4-3/tests/data/jsonschema/oneof_const_enum_object.yaml 0.45.0-1/tests/data/jsonschema/oneof_const_enum_object.yaml
--- 0.26.4-3/tests/data/jsonschema/oneof_const_enum_object.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/oneof_const_enum_object.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+$schema: http://json-schema.org/draft-07/schema#
+title: ObjectConst
+oneOf:
+  - const:
+      key: value1
+  - const:
+      key: value2
diff -pruN 0.26.4-3/tests/data/jsonschema/oneof_const_enum_type_list.yaml 0.45.0-1/tests/data/jsonschema/oneof_const_enum_type_list.yaml
--- 0.26.4-3/tests/data/jsonschema/oneof_const_enum_type_list.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/oneof_const_enum_type_list.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,8 @@
+$schema: http://json-schema.org/draft-07/schema#
+title: Mode With Null
+type:
+  - string
+  - "null"
+oneOf:
+  - const: "on"
+  - const: "off"
diff -pruN 0.26.4-3/tests/data/jsonschema/oneof_const_enum_type_list_no_null.yaml 0.45.0-1/tests/data/jsonschema/oneof_const_enum_type_list_no_null.yaml
--- 0.26.4-3/tests/data/jsonschema/oneof_const_enum_type_list_no_null.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/oneof_const_enum_type_list_no_null.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,8 @@
+$schema: http://json-schema.org/draft-07/schema#
+title: MultiType
+type:
+  - string
+  - integer
+oneOf:
+  - const: "value1"
+  - const: "value2"
diff -pruN 0.26.4-3/tests/data/jsonschema/oneof_const_mixed_with_ref.yaml 0.45.0-1/tests/data/jsonschema/oneof_const_mixed_with_ref.yaml
--- 0.26.4-3/tests/data/jsonschema/oneof_const_mixed_with_ref.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/oneof_const_mixed_with_ref.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+$schema: http://json-schema.org/draft-07/schema#
+title: MixedUnion
+oneOf:
+  - const: value1
+  - $ref: "#/definitions/SomeType"
+definitions:
+  SomeType:
+    type: object
+    properties:
+      name:
+        type: string
diff -pruN 0.26.4-3/tests/data/jsonschema/oneof_const_with_properties.yaml 0.45.0-1/tests/data/jsonschema/oneof_const_with_properties.yaml
--- 0.26.4-3/tests/data/jsonschema/oneof_const_with_properties.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/oneof_const_with_properties.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+$schema: http://json-schema.org/draft-07/schema#
+title: ConstWithProps
+oneOf:
+  - const: value1
+    properties:
+      invalid: true
+  - const: value2
diff -pruN 0.26.4-3/tests/data/jsonschema/pattern.json 0.45.0-1/tests/data/jsonschema/pattern.json
--- 0.26.4-3/tests/data/jsonschema/pattern.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/pattern.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,23 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Info",
+  "type": "object",
+  "properties": {
+    "hostName": {
+      "type": "string",
+      "format": "hostname"
+    },
+    "arn": {
+      "type": "string",
+      "pattern": "(^arn:([^:]*):([^:]*):([^:]*):(|\\*|[\\d]{12}):(.+)$)|^\\*$"
+    },
+    "tel": {
+      "type": "string",
+      "pattern": "^(\\([0-9]{3}\\))?[0-9]{3}-[0-9]{4}$"
+    },
+    "comment": {
+        "type": "string",
+        "pattern": "[^\b\f\n\r\t\\\\a+.?'\"|()]+$"
+    }
+  }
+}
\ No newline at end of file
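
pattern.json carries regular-expression constraints that typically surface as field metadata. A sketch in pydantic v2 style (v1 spells the keyword regex= instead of pattern=); the control-character-laden comment pattern is omitted here:

from typing import Optional

from pydantic import BaseModel, Field


class Info(BaseModel):
    hostName: Optional[str] = None  # "format": "hostname" may stay a plain str
    arn: Optional[str] = Field(
        None, pattern=r"(^arn:([^:]*):([^:]*):([^:]*):(|\*|[\d]{12}):(.+)$)|^\*$"
    )
    tel: Optional[str] = Field(None, pattern=r"^(\([0-9]{3}\))?[0-9]{3}-[0-9]{4}$")
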
diff -pruN 0.26.4-3/tests/data/jsonschema/pattern_properties.json 0.45.0-1/tests/data/jsonschema/pattern_properties.json
--- 0.26.4-3/tests/data/jsonschema/pattern_properties.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/pattern_properties.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,28 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Foo",
+  "type": "object",
+  "required": [
+     "bar"
+  ],
+  "properties": {
+    "bar": {
+      "type": "object",
+      "patternProperties": {
+        "^([a-zA-Z_][a-zA-Z0-9_]*)$": {
+          "$ref": "#/definitions/Bar"
+        }
+      }
+    }
+  },
+  "definitions": {
+    "Bar": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string"
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
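
patternProperties ordinarily maps to a dict value type. A sketch of the expected shape for the fixture above; whether the key pattern itself is enforced (for example via a constrained string key type) depends on generator options:

from typing import Dict, Optional

from pydantic import BaseModel


class Bar(BaseModel):
    name: Optional[str] = None


class Foo(BaseModel):
    bar: Dict[str, Bar]  # keys matching ^([a-zA-Z_][a-zA-Z0-9_]*)$ map to Bar
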
diff -pruN 0.26.4-3/tests/data/jsonschema/pattern_properties_by_reference.json 0.45.0-1/tests/data/jsonschema/pattern_properties_by_reference.json
--- 0.26.4-3/tests/data/jsonschema/pattern_properties_by_reference.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/pattern_properties_by_reference.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,54 @@
+
+{
+  "$defs": {
+    "stt": {
+      "additionalProperties": false,
+      "description": "STT properties allows to configure how the user voice is converted to text",
+      "properties": {
+        "timeout": {
+          "title": "STT Timeout",
+          "type": "number"
+        }
+      },
+      "title": "Speech to text Settings",
+      "type": "object"
+    },
+    "textResponse": {
+      "additionalProperties": false,
+      "type": "object",
+      "patternProperties": {
+        "^[a-z]{1}[0-9]{1}$": {
+          "phoneNumber": {
+            "additionalProperties": false,
+            "type": "number"
+          }
+        }
+      }
+    }
+  },
+  "$schema": "http://json-schema.org/draft-07/schema",
+  "additionalProperties": false,
+  "description": "The root schema comprises the entire JSON document",
+  "properties": {
+    "KeyWithExplicitPatternProperties": {
+      "additionalProperties": false,
+      "type": "object",
+      "patternProperties": {
+        "^[a-z]{1}[0-9]{1}$": {
+          "phoneNumber": {
+            "additionalProperties": false,
+            "type": "number"
+          }
+        }
+      }
+    },
+    "KeyWithPatternPropertiesByReference": {
+      "$ref": "#/$defs/textResponse"
+    },
+    "SomeOtherBoringReference": {
+      "$ref": "#/$defs/stt"
+    }
+  },
+  "title": "SomeSchema Schema",
+  "type": "object"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/person.json 0.45.0-1/tests/data/jsonschema/person.json
--- 0.26.4-3/tests/data/jsonschema/person.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/person.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,26 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Person",
+  "type": "object",
+  "properties": {
+    "firstName": {
+      "type": "string",
+      "description": "The person's first name."
+    },
+    "lastName": {
+      "type": ["string", "null"],
+      "description": "The person's last name."
+    },
+    "age": {
+      "description": "Age in years which must be equal to or greater than zero.",
+      "type": "integer",
+      "minimum": 0
+    },
+    "friends": {
+      "type": "array"
+    },
+    "comment": {
+      "type": "null"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/pet_simple.json 0.45.0-1/tests/data/jsonschema/pet_simple.json
--- 0.26.4-3/tests/data/jsonschema/pet_simple.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/pet_simple.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Pet",
+  "type": "object",
+  "properties": {
+    "id": {
+      "type": "integer"
+    },
+    "name": {
+      "type": "string"
+    },
+    "tag": {
+      "type": "string"
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/pydantic_v2_model_rebuild_inheritance.json 0.45.0-1/tests/data/jsonschema/pydantic_v2_model_rebuild_inheritance.json
--- 0.26.4-3/tests/data/jsonschema/pydantic_v2_model_rebuild_inheritance.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/pydantic_v2_model_rebuild_inheritance.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,31 @@
+{
+  "$schema": "https://json-schema.org/draft/2020-12/schema",
+  "$defs": {
+    "Base": {
+      "title": "Base",
+      "type": "object",
+      "properties": {
+        "next": {
+          "$ref": "#/$defs/Base"
+        }
+      }
+    },
+    "Derived": {
+      "title": "Derived",
+      "allOf": [
+        {
+          "$ref": "#/$defs/Base"
+        },
+        {
+          "type": "object",
+          "properties": {
+            "value": {
+              "type": "string"
+            }
+          }
+        }
+      ]
+    }
+  },
+  "$ref": "#/$defs/Derived"
+}
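
This fixture combines inheritance with a self-reference, the situation where pydantic v2 output needs deferred annotations and an explicit rebuild. A minimal sketch of the pattern being tested:

from __future__ import annotations

from typing import Optional

from pydantic import BaseModel


class Base(BaseModel):
    next: Optional[Base] = None  # self-reference resolved lazily


class Derived(Base):
    value: Optional[str] = None


# Resolving forward references eagerly surfaces any failure at import time.
Base.model_rebuild()
Derived.model_rebuild()
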
diff -pruN 0.26.4-3/tests/data/jsonschema/ref_type_has_null.json 0.45.0-1/tests/data/jsonschema/ref_type_has_null.json
--- 0.26.4-3/tests/data/jsonschema/ref_type_has_null.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/ref_type_has_null.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,27 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "NullableString": {
+      "type": ["string", "null"]
+    },
+    "NonNullableString": {
+      "type": "string"
+    },
+    "NullOnly": {
+      "type": "null"
+    }
+  },
+  "type": "object",
+  "required": ["nullableRef", "nonNullableRef", "nullOnlyRef"],
+  "properties": {
+    "nullableRef": {
+      "$ref": "#/definitions/NullableString"
+    },
+    "nonNullableRef": {
+      "$ref": "#/definitions/NonNullableString"
+    },
+    "nullOnlyRef": {
+      "$ref": "#/definitions/NullOnly"
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/ref_with_additional_keywords/commons.schema.json 0.45.0-1/tests/data/jsonschema/ref_with_additional_keywords/commons.schema.json
--- 0.26.4-3/tests/data/jsonschema/ref_with_additional_keywords/commons.schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/ref_with_additional_keywords/commons.schema.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+{
+  "$schema": "https://json-schema.org/draft/2020-12/schema",
+  "$id": "https://example.com/commons.schema.json",
+  "title": "Commons",
+  "description": "Commons objects",
+  "$defs": {
+    "defaultArray": {
+      "type": "array",
+      "minItems": 1,
+      "maxItems": 100
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/ref_with_additional_keywords/products.schema.json 0.45.0-1/tests/data/jsonschema/ref_with_additional_keywords/products.schema.json
--- 0.26.4-3/tests/data/jsonschema/ref_with_additional_keywords/products.schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/ref_with_additional_keywords/products.schema.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+{
+  "$schema": "https://json-schema.org/draft/2020-12/schema",
+  "$id": "https://example.com/products.schema.json",
+  "title": "Products",
+  "description": "The products in the catalog",
+  "$ref": "commons.schema.json#/$defs/defaultArray",
+  "items": {
+    "type": "string"
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/require_referenced_field/referenced.json 0.45.0-1/tests/data/jsonschema/require_referenced_field/referenced.json
--- 0.26.4-3/tests/data/jsonschema/require_referenced_field/referenced.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/require_referenced_field/referenced.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema#",
+    "$id": "test",
+    "description": "test",
+    "type": "object",
+    "properties": {
+        "some_optional_property": {
+            "type": "string"
+        },
+        "some_optional_typed_property": {
+            "type": "string",
+            "format":"date-time"
+        }
+    }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/require_referenced_field/required.json 0.45.0-1/tests/data/jsonschema/require_referenced_field/required.json
--- 0.26.4-3/tests/data/jsonschema/require_referenced_field/required.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/require_referenced_field/required.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema#",
+    "$id": "test",
+    "description": "test",
+    "type": "object",
+    "allOf": [
+      {
+        "$ref": "referenced.json"
+      }
+    ],
+    "required" : ["some_optional_property", "some_optional_typed_property"]
+  }
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/required_and_any_of_required.json 0.45.0-1/tests/data/jsonschema/required_and_any_of_required.json
--- 0.26.4-3/tests/data/jsonschema/required_and_any_of_required.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/required_and_any_of_required.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,37 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "required": [
+    "foo"
+  ],
+  "properties": {
+    "foo": {
+      "type": "object",
+      "required": [
+        "bar"
+      ],
+      "anyOf": [{
+          "required": [
+            "baz"
+          ]
+        },
+        {
+          "required": [
+            "qux"
+          ]
+        }
+      ],
+      "properties": {
+        "bar": {
+          "type": "integer"
+        },
+        "baz": {
+          "type": "integer"
+        },
+        "qux": {
+          "type": "integer"
+        }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/reserved_field_name_schema.json 0.45.0-1/tests/data/jsonschema/reserved_field_name_schema.json
--- 0.26.4-3/tests/data/jsonschema/reserved_field_name_schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/reserved_field_name_schema.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "ModelWithSchemaField",
+  "type": "object",
+  "properties": {
+    "schema": {
+      "type": "string",
+      "description": "A field named 'schema' that should not be renamed in non-Pydantic models"
+    },
+    "name": {
+      "type": "string"
+    }
+  },
+  "required": ["schema", "name"]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/reserved_property.json 0.45.0-1/tests/data/jsonschema/reserved_property.json
--- 0.26.4-3/tests/data/jsonschema/reserved_property.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/reserved_property.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+{
+  "title": "ReservedNames",
+  "type": "object",
+  "properties": {
+    "json": {"type": "string"},
+    "schema": {"type": "string"},
+    "dict": {"type": "integer"}
+  },
+  "required": ["json", "schema", "dict"]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/reuse_scope_tree/schema_a.json 0.45.0-1/tests/data/jsonschema/reuse_scope_tree/schema_a.json
--- 0.26.4-3/tests/data/jsonschema/reuse_scope_tree/schema_a.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/reuse_scope_tree/schema_a.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+{
+  "type": "object",
+  "properties": {
+    "data": { "$ref": "#/$defs/SharedModel" }
+  },
+  "$defs": {
+    "SharedModel": {
+      "type": "object",
+      "properties": {
+        "id": { "type": "integer" },
+        "name": { "type": "string" }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/reuse_scope_tree/schema_b.json 0.45.0-1/tests/data/jsonschema/reuse_scope_tree/schema_b.json
--- 0.26.4-3/tests/data/jsonschema/reuse_scope_tree/schema_b.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/reuse_scope_tree/schema_b.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+{
+  "type": "object",
+  "properties": {
+    "info": { "$ref": "#/$defs/SharedModel" }
+  },
+  "$defs": {
+    "SharedModel": {
+      "type": "object",
+      "properties": {
+        "id": { "type": "integer" },
+        "name": { "type": "string" }
+      }
+    }
+  }
+}
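
schema_a.json and schema_b.json above declare byte-identical SharedModel definitions, the case the reuse_scope_tree fixtures exercise. A sketch of driving the generator over the directory with the library API; the reuse_model keyword is assumed to mirror the --reuse-model CLI flag, which lets identical definitions collapse into a single class:

from pathlib import Path

from datamodel_code_generator import InputFileType, generate

generate(
    Path('tests/data/jsonschema/reuse_scope_tree'),  # parse both schemas together
    input_file_type=InputFileType.JsonSchema,
    output=Path('models'),
    reuse_model=True,  # assumption: keyword form of --reuse-model
)
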
diff -pruN 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_branch/aaa_first.json 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_branch/aaa_first.json
--- 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_branch/aaa_first.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_branch/aaa_first.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "type": "object",
+  "properties": {
+    "data": { "$ref": "#/$defs/UniqueModelA" }
+  },
+  "$defs": {
+    "UniqueModelA": {
+      "type": "object",
+      "properties": {
+        "name": { "type": "string" }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_branch/bbb_second.json 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_branch/bbb_second.json
--- 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_branch/bbb_second.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_branch/bbb_second.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "type": "object",
+  "properties": {
+    "info": { "$ref": "#/$defs/UniqueModelB" }
+  },
+  "$defs": {
+    "UniqueModelB": {
+      "type": "object",
+      "properties": {
+        "value": { "type": "integer" }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_branch/yyy_duplicate.json 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_branch/yyy_duplicate.json
--- 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_branch/yyy_duplicate.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_branch/yyy_duplicate.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "type": "object",
+  "properties": {
+    "data": { "$ref": "#/$defs/SharedModel" }
+  },
+  "$defs": {
+    "SharedModel": {
+      "type": "object",
+      "properties": {
+        "id": { "type": "integer" }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_branch/zzz_last.json 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_branch/zzz_last.json
--- 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_branch/zzz_last.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_branch/zzz_last.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "type": "object",
+  "properties": {
+    "item": { "$ref": "#/$defs/SharedModel" }
+  },
+  "$defs": {
+    "SharedModel": {
+      "type": "object",
+      "properties": {
+        "id": { "type": "integer" }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_conflict/schema_a.json 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_conflict/schema_a.json
--- 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_conflict/schema_a.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_conflict/schema_a.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "type": "object",
+  "properties": {
+    "data": { "$ref": "#/$defs/SharedModel" }
+  },
+  "$defs": {
+    "SharedModel": {
+      "type": "object",
+      "properties": {
+        "id": { "type": "integer" }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_conflict/shared.json 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_conflict/shared.json
--- 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_conflict/shared.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_conflict/shared.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "type": "object",
+  "properties": {
+    "info": { "$ref": "#/$defs/SharedModel" }
+  },
+  "$defs": {
+    "SharedModel": {
+      "type": "object",
+      "properties": {
+        "id": { "type": "integer" }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_conflict_dir/other.json 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_conflict_dir/other.json
--- 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_conflict_dir/other.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_conflict_dir/other.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "type": "object",
+  "properties": {
+    "info": { "$ref": "#/$defs/SharedModel" }
+  },
+  "$defs": {
+    "SharedModel": {
+      "type": "object",
+      "properties": {
+        "id": { "type": "integer" }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_conflict_dir/shared/schema.json 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_conflict_dir/shared/schema.json
--- 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_conflict_dir/shared/schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_conflict_dir/shared/schema.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "type": "object",
+  "properties": {
+    "data": { "$ref": "#/$defs/SharedModel" }
+  },
+  "$defs": {
+    "SharedModel": {
+      "type": "object",
+      "properties": {
+        "id": { "type": "integer" }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_dataclass/schema_a.json 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_dataclass/schema_a.json
--- 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_dataclass/schema_a.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_dataclass/schema_a.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+{
+  "type": "object",
+  "properties": {
+    "data": { "$ref": "#/$defs/SharedModel" }
+  },
+  "$defs": {
+    "SharedModel": {
+      "type": "object",
+      "properties": {
+        "id": { "type": "integer" },
+        "name": { "type": "string" }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_dataclass/schema_b.json 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_dataclass/schema_b.json
--- 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_dataclass/schema_b.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_dataclass/schema_b.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+{
+  "type": "object",
+  "properties": {
+    "info": { "$ref": "#/$defs/SharedModel" }
+  },
+  "$defs": {
+    "SharedModel": {
+      "type": "object",
+      "properties": {
+        "id": { "type": "integer" },
+        "name": { "type": "string" }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_enum/schema_a.json 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_enum/schema_a.json
--- 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_enum/schema_a.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_enum/schema_a.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+{
+  "type": "object",
+  "properties": {
+    "status": { "$ref": "#/$defs/Status" }
+  },
+  "$defs": {
+    "Status": {
+      "type": "string",
+      "enum": ["active", "inactive", "pending"]
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_enum/schema_b.json 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_enum/schema_b.json
--- 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_enum/schema_b.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_enum/schema_b.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+{
+  "type": "object",
+  "properties": {
+    "state": { "$ref": "#/$defs/Status" }
+  },
+  "$defs": {
+    "Status": {
+      "type": "string",
+      "enum": ["active", "inactive", "pending"]
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_multi/schema_a.json 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_multi/schema_a.json
--- 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_multi/schema_a.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_multi/schema_a.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+{
+  "type": "object",
+  "properties": {
+    "data": { "$ref": "#/$defs/ModelA" },
+    "info": { "$ref": "#/$defs/SharedModel2" }
+  },
+  "$defs": {
+    "ModelA": {
+      "type": "object",
+      "properties": {
+        "name": { "type": "string" }
+      }
+    },
+    "SharedModel2": {
+      "type": "object",
+      "properties": {
+        "value": { "type": "string" }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_multi/schema_b.json 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_multi/schema_b.json
--- 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_multi/schema_b.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_multi/schema_b.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "type": "object",
+  "properties": {
+    "info": { "$ref": "#/$defs/SharedModel" }
+  },
+  "$defs": {
+    "SharedModel": {
+      "type": "object",
+      "properties": {
+        "id": { "type": "integer" }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_multi/schema_c.json 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_multi/schema_c.json
--- 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_multi/schema_c.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_multi/schema_c.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+{
+  "type": "object",
+  "properties": {
+    "data": { "$ref": "#/$defs/SharedModel" },
+    "extra": { "$ref": "#/$defs/SharedModel2" }
+  },
+  "$defs": {
+    "SharedModel": {
+      "type": "object",
+      "properties": {
+        "id": { "type": "integer" }
+      }
+    },
+    "SharedModel2": {
+      "type": "object",
+      "properties": {
+        "value": { "type": "string" }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_multi/schema_d.json 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_multi/schema_d.json
--- 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_multi/schema_d.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_multi/schema_d.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "type": "object",
+  "properties": {
+    "item": { "$ref": "#/$defs/SharedModel" }
+  },
+  "$defs": {
+    "SharedModel": {
+      "type": "object",
+      "properties": {
+        "id": { "type": "integer" }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_no_conflict_dir/other.json 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_no_conflict_dir/other.json
--- 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_no_conflict_dir/other.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_no_conflict_dir/other.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "type": "object",
+  "properties": {
+    "info": { "$ref": "#/$defs/ModelB" }
+  },
+  "$defs": {
+    "ModelB": {
+      "type": "object",
+      "properties": {
+        "id": { "type": "integer" }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_no_conflict_dir/shared/schema.json 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_no_conflict_dir/shared/schema.json
--- 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_no_conflict_dir/shared/schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_no_conflict_dir/shared/schema.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "type": "object",
+  "properties": {
+    "data": { "$ref": "#/$defs/ModelA" }
+  },
+  "$defs": {
+    "ModelA": {
+      "type": "object",
+      "properties": {
+        "name": { "type": "string" }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_no_dup/schema_a.json 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_no_dup/schema_a.json
--- 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_no_dup/schema_a.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_no_dup/schema_a.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,6 @@
+{
+  "type": "object",
+  "properties": {
+    "name": { "type": "string" }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_no_dup/schema_b.json 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_no_dup/schema_b.json
--- 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_no_dup/schema_b.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_no_dup/schema_b.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,6 @@
+{
+  "type": "object",
+  "properties": {
+    "id": { "type": "integer" }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_self_ref/schema_a.json 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_self_ref/schema_a.json
--- 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_self_ref/schema_a.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_self_ref/schema_a.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+{
+  "type": "object",
+  "properties": {
+    "node": { "$ref": "#/$defs/Node" }
+  },
+  "$defs": {
+    "Node": {
+      "type": "object",
+      "properties": {
+        "id": { "type": "integer" },
+        "child": { "$ref": "#/$defs/Node" }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_self_ref/schema_b.json 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_self_ref/schema_b.json
--- 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_self_ref/schema_b.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_self_ref/schema_b.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+{
+  "type": "object",
+  "properties": {
+    "tree": { "$ref": "#/$defs/Node" }
+  },
+  "$defs": {
+    "Node": {
+      "type": "object",
+      "properties": {
+        "id": { "type": "integer" },
+        "child": { "$ref": "#/$defs/Node" }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_typeddict/schema_a.json 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_typeddict/schema_a.json
--- 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_typeddict/schema_a.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_typeddict/schema_a.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+{
+  "type": "object",
+  "properties": {
+    "data": { "$ref": "#/$defs/SharedModel" }
+  },
+  "$defs": {
+    "SharedModel": {
+      "type": "object",
+      "properties": {
+        "id": { "type": "integer" },
+        "name": { "type": "string" }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_typeddict/schema_b.json 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_typeddict/schema_b.json
--- 0.26.4-3/tests/data/jsonschema/reuse_scope_tree_typeddict/schema_b.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/reuse_scope_tree_typeddict/schema_b.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+{
+  "type": "object",
+  "properties": {
+    "info": { "$ref": "#/$defs/SharedModel" }
+  },
+  "$defs": {
+    "SharedModel": {
+      "type": "object",
+      "properties": {
+        "id": { "type": "integer" },
+        "name": { "type": "string" }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/root_id.json 0.45.0-1/tests/data/jsonschema/root_id.json
--- 0.26.4-3/tests/data/jsonschema/root_id.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/root_id.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+{
+  "$id": "https://example.com/root_id.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "Person": {
+      "$ref": "person.json"
+    },
+    "OriginalPerson": {
+      "$ref": "person.json"
+    },
+    "Pet": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string",
+          "examples": ["dog", "cat"]
+        },
+        "owner": {
+           "$ref": "person.json"
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/root_id_absolute_url.json 0.45.0-1/tests/data/jsonschema/root_id_absolute_url.json
--- 0.26.4-3/tests/data/jsonschema/root_id_absolute_url.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/root_id_absolute_url.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+{
+  "$id": "https://example.com/root_id.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "Person": {
+      "$ref": "person.json"
+    },
+    "OriginalPerson": {
+      "$ref": "person.json"
+    },
+    "Pet": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string",
+          "examples": ["dog", "cat"]
+        },
+        "owner": {
+           "$ref": "https://example.com/person.json"
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/root_id_ref.json 0.45.0-1/tests/data/jsonschema/root_id_ref.json
--- 0.26.4-3/tests/data/jsonschema/root_id_ref.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/root_id_ref.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "Person": {
+      "$ref": "root_id.json#/definitions/Person"
+    },
+    "OriginalPerson": {
+      "$ref": "root_id.json#/definitions/OriginalPerson"
+    },
+    "Pet": {
+      "$ref": "root_id.json#/definitions/Pet"
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/root_id_self_ref.json 0.45.0-1/tests/data/jsonschema/root_id_self_ref.json
--- 0.26.4-3/tests/data/jsonschema/root_id_self_ref.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/root_id_self_ref.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+{
+  "$id": "https://example.com/root_id_self_ref.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "Person": {
+      "$ref": "person.json"
+    },
+    "OriginalPerson": {
+      "$ref": "person.json"
+    },
+    "Pet": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string",
+          "examples": ["dog", "cat"]
+        },
+        "owner": {
+           "$ref": "https://example.com/root_id_self_ref.json#/definitions/Person"
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/root_model_default_value.json 0.45.0-1/tests/data/jsonschema/root_model_default_value.json
--- 0.26.4-3/tests/data/jsonschema/root_model_default_value.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/root_model_default_value.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,34 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "properties": {
+    "admin_state": {
+      "$ref": "#/$defs/AdminStateLeaf",
+      "default": "enable"
+    },
+    "count": {
+      "$ref": "#/$defs/CountType",
+      "default": 10
+    },
+    "name": {
+      "$ref": "#/$defs/NameType",
+      "default": "default_name"
+    }
+  },
+  "$defs": {
+    "AdminStateLeaf": {
+      "type": "string",
+      "enum": ["enable", "disable"]
+    },
+    "CountType": {
+      "type": "integer",
+      "minimum": 0,
+      "maximum": 100
+    },
+    "NameType": {
+      "type": "string",
+      "minLength": 1,
+      "maxLength": 50
+    }
+  }
+}
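
Here the defaults sit on properties that reference standalone (root-model) types, so the generated default must be wrapped in the root type rather than used bare. A pydantic v2 sketch of the two interesting cases, with NameType omitted for brevity; this is an illustration, not the project's actual output:

from enum import Enum

from pydantic import BaseModel, Field, RootModel


class AdminStateLeaf(Enum):
    enable = 'enable'
    disable = 'disable'


class CountType(RootModel[int]):
    root: int = Field(..., ge=0, le=100)


class Model(BaseModel):
    admin_state: AdminStateLeaf = AdminStateLeaf.enable
    count: CountType = CountType(10)  # default wrapped into the root model
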
diff -pruN 0.26.4-3/tests/data/jsonschema/root_model_default_value_branches.json 0.45.0-1/tests/data/jsonschema/root_model_default_value_branches.json
--- 0.26.4-3/tests/data/jsonschema/root_model_default_value_branches.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/root_model_default_value_branches.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,27 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "properties": {
+    "count_with_default": {
+      "$ref": "#/$defs/CountType",
+      "default": 10
+    },
+    "count_no_default": {
+      "$ref": "#/$defs/CountType"
+    },
+    "count_list_default": {
+      "type": "array",
+      "items": {
+        "$ref": "#/$defs/CountType"
+      },
+      "default": [1, 2, 3]
+    }
+  },
+  "$defs": {
+    "CountType": {
+      "type": "integer",
+      "minimum": 0,
+      "maximum": 100
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/root_model_default_value_non_root.json 0.45.0-1/tests/data/jsonschema/root_model_default_value_non_root.json
--- 0.26.4-3/tests/data/jsonschema/root_model_default_value_non_root.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/root_model_default_value_non_root.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,33 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "properties": {
+    "root_model_field": {
+      "$ref": "#/$defs/CountType",
+      "default": 10
+    },
+    "non_root_model_field": {
+      "$ref": "#/$defs/PersonType",
+      "default": {"name": "John"}
+    },
+    "primitive_field": {
+      "type": "string",
+      "default": "hello"
+    }
+  },
+  "$defs": {
+    "CountType": {
+      "type": "integer",
+      "minimum": 0,
+      "maximum": 100
+    },
+    "PersonType": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string"
+        }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/root_model_ordering.json 0.45.0-1/tests/data/jsonschema/root_model_ordering.json
--- 0.26.4-3/tests/data/jsonschema/root_model_ordering.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/root_model_ordering.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,128 @@
+{
+  "$defs": {
+    "Dog": {
+      "properties": {
+        "name": {
+          "const": "dog",
+          "default": "dog",
+          "title": "woof",
+          "type": "string"
+        },
+        "friends": {
+          "items": {
+            "discriminator": {
+              "mapping": {
+                "bird": "#/$defs/Bird",
+                "cat": "#/$defs/Cat",
+                "dog": "#/$defs/Dog"
+              },
+              "propertyName": "name"
+            },
+            "oneOf": [
+              {"$ref": "#/$defs/Dog"},
+              {"$ref": "#/$defs/Cat"},
+              {"$ref": "#/$defs/Bird"}
+            ],
+            "title": "Animal"
+          },
+          "title": "Friends",
+          "type": "array",
+          "default": []
+        }
+      },
+      "title": "Dog",
+      "type": "object"
+    },
+    "Cat": {
+      "properties": {
+        "name": {
+          "const": "cat",
+          "default": "cat",
+          "title": "meow",
+          "type": "string"
+        },
+        "friends": {
+          "items": {
+            "discriminator": {
+              "mapping": {
+                "bird": "#/$defs/Bird",
+                "cat": "#/$defs/Cat",
+                "dog": "#/$defs/Dog"
+              },
+              "propertyName": "name"
+            },
+            "oneOf": [
+              {"$ref": "#/$defs/Dog"},
+              {"$ref": "#/$defs/Cat"},
+              {"$ref": "#/$defs/Bird"}
+            ],
+            "title": "Animal"
+          },
+          "title": "Friends",
+          "type": "array",
+          "default": []
+        }
+      },
+      "title": "Cat",
+      "type": "object"
+    },
+    "Bird": {
+      "properties": {
+        "name": {
+          "const": "bird",
+          "default": "bird",
+          "title": "chirp",
+          "type": "string"
+        },
+        "friends": {
+          "items": {
+            "discriminator": {
+              "mapping": {
+                "bird": "#/$defs/Bird",
+                "cat": "#/$defs/Cat",
+                "dog": "#/$defs/Dog"
+              },
+              "propertyName": "name"
+            },
+            "oneOf": [
+              {"$ref": "#/$defs/Dog"},
+              {"$ref": "#/$defs/Cat"},
+              {"$ref": "#/$defs/Bird"}
+            ],
+            "title": "Animal"
+          },
+          "title": "Friends",
+          "type": "array",
+          "default": []
+        }
+      },
+      "title": "Bird",
+      "type": "object"
+    }
+  },
+  "properties": {
+    "animals": {
+      "default": [],
+      "items": {
+        "discriminator": {
+          "mapping": {
+            "bird": "#/$defs/Bird",
+            "cat": "#/$defs/Cat",
+            "dog": "#/$defs/Dog"
+          },
+          "propertyName": "name"
+        },
+        "oneOf": [
+          {"$ref": "#/$defs/Dog"},
+          {"$ref": "#/$defs/Cat"},
+          {"$ref": "#/$defs/Bird"}
+        ],
+        "title": "Animal"
+      },
+      "title": "Animals",
+      "type": "array"
+    }
+  },
+  "title": "Zoo",
+  "type": "object"
+}
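
The Zoo fixture is a mutually recursive discriminated union keyed on the constant "name" property. A pydantic v2 sketch of the pattern, with defaults and titles trimmed:

from typing import Annotated, List, Literal, Union

from pydantic import BaseModel, Field


class Dog(BaseModel):
    name: Literal['dog'] = 'dog'
    friends: List['Animal'] = []


class Cat(BaseModel):
    name: Literal['cat'] = 'cat'
    friends: List['Animal'] = []


class Bird(BaseModel):
    name: Literal['bird'] = 'bird'
    friends: List['Animal'] = []


# "name" is the discriminator, matching the schema's mapping block.
Animal = Annotated[Union[Dog, Cat, Bird], Field(discriminator='name')]

Dog.model_rebuild()
Cat.model_rebuild()
Bird.model_rebuild()


class Zoo(BaseModel):
    animals: List[Animal] = []
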
diff -pruN 0.26.4-3/tests/data/jsonschema/root_model_with_additional_properties.json 0.45.0-1/tests/data/jsonschema/root_model_with_additional_properties.json
--- 0.26.4-3/tests/data/jsonschema/root_model_with_additional_properties.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/root_model_with_additional_properties.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,127 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "$id": "test.json",
+  "description": "test",
+  "type": "object",
+  "required": [
+    "test_id",
+    "test_ip",
+    "result",
+    "nested_object_result",
+    "nested_enum_result"
+  ],
+  "properties": {
+    "test_id": {
+      "type": "string",
+      "description": "test ID"
+    },
+    "test_ip": {
+      "type": "string",
+      "description": "test IP"
+    },
+    "result": {
+      "type": "object",
+      "additionalProperties": {
+        "type": "integer"
+      }
+    },
+    "nested_object_result": {
+      "type": "object",
+      "additionalProperties": {
+        "type": "object",
+        "properties": {
+          "status":{
+            "type": "integer"
+          }
+        },
+        "required": ["status"]
+      }
+    },
+    "nested_enum_result": {
+      "type": "object",
+      "additionalProperties": {
+        "enum": ["red", "green"]
+      }
+    },
+    "all_of_result" :{
+      "type" : "object",
+      "additionalProperties" :
+      {
+        "allOf" : [
+          { "$ref" : "#/definitions/User" },
+          { "type" : "object",
+            "properties": {
+              "description": {"type" : "string" }
+            }
+          }
+        ]
+      }
+    },
+    "one_of_result" :{
+      "type" : "object",
+      "additionalProperties" :
+      {
+        "oneOf" : [
+          { "$ref" : "#/definitions/User" },
+          { "type" : "object",
+            "properties": {
+              "description": {"type" : "string" }
+            }
+          }
+        ]
+      }
+    },
+    "any_of_result" :{
+      "type" : "object",
+      "additionalProperties" :
+      {
+        "anyOf" : [
+          { "$ref" : "#/definitions/User" },
+          { "type" : "object",
+            "properties": {
+              "description": {"type" : "string" }
+            }
+          }
+        ]
+      }
+    },
+    "all_of_with_unknown_object" :{
+      "type" : "object",
+      "additionalProperties" :
+      {
+        "allOf" : [
+          { "$ref" : "#/definitions/User" },
+          { "description": "TODO" }
+        ]
+      }
+    },
+    "objectRef": {
+      "type": "object",
+      "additionalProperties": {
+        "$ref": "#/definitions/User"
+      }
+    },
+    "deepNestedObjectRef": {
+      "type": "object",
+      "additionalProperties": {
+        "type": "object",
+        "additionalProperties": {
+          "type": "object",
+          "additionalProperties": {
+             "$ref": "#/definitions/User"
+          }
+        }
+      }
+    }
+  },
+  "definitions": {
+    "User": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string"
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
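
additionalProperties with a schema value turns an object into a mapping. A sketch of a few representative fields from the fixture above; the remaining fields follow the same pattern:

from typing import Dict, Optional

from pydantic import BaseModel


class User(BaseModel):
    name: Optional[str] = None


class Model(BaseModel):
    result: Dict[str, int]  # scalar value schema: plain dict value type
    objectRef: Optional[Dict[str, User]] = None  # $ref value: mapped model
    deepNestedObjectRef: Optional[Dict[str, Dict[str, Dict[str, User]]]] = None
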
diff -pruN 0.26.4-3/tests/data/jsonschema/root_one_of/bar.json 0.45.0-1/tests/data/jsonschema/root_one_of/bar.json
--- 0.26.4-3/tests/data/jsonschema/root_one_of/bar.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/root_one_of/bar.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "$id": "bar.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "JobRun",
+  "description": "Scheduled Execution Context Configuration.",
+  "type": "object",
+  "properties": {
+    "enabled": {
+      "description": "If Live Execution is Enabled.",
+      "type": "boolean",
+      "default": false
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/root_one_of/foo.json 0.45.0-1/tests/data/jsonschema/root_one_of/foo.json
--- 0.26.4-3/tests/data/jsonschema/root_one_of/foo.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/root_one_of/foo.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+{
+  "$id": "foo.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "JobRun",
+  "description": "Live Execution object.",
+  "type": "object",
+  "properties": {
+    "enabled": {
+      "description": "If Live Execution is enabled",
+      "type": "boolean",
+      "default": false
+    },
+    "resources": {
+      "description": "Resource full classname to register to extend any endpoints.",
+      "type": "array",
+      "items": {
+        "type": "string"
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/root_one_of/union.json 0.45.0-1/tests/data/jsonschema/root_one_of/union.json
--- 0.26.4-3/tests/data/jsonschema/root_one_of/union.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/root_one_of/union.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,27 @@
+{
+  "$id": "union.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "App",
+  "description": "This schema defines the applications for Open-Metadata.",
+  "type": "object",
+  "definitions": {
+    "executionContext": {
+      "description": "Execution Configuration.",
+      "oneOf": [
+        {
+          "$ref": "./foo.json"
+        },
+        {
+          "$ref": "./bar.json"
+        }
+      ],
+      "additionalProperties": false
+    }
+  },
+  "properties": {
+    "runtime": {
+      "description": "Execution Configuration.",
+      "$ref": "#/definitions/executionContext"
+    }
+  }
+}
\ No newline at end of file
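
Here `executionContext` is a root-level `oneOf` spanning two sibling documents, and both sub-schemas carry the same title (`JobRun`), so the generator must disambiguate the class names. A minimal pydantic v2 sketch of the intended shape (the `JobRun1` name is an assumption):

    from typing import List, Optional, Union
    from pydantic import BaseModel, RootModel


    class JobRun(BaseModel):
        enabled: bool = False


    class JobRun1(BaseModel):
        enabled: bool = False
        resources: Optional[List[str]] = None


    # The root-level oneOf becomes a union wrapped in a root model.
    class ExecutionContext(RootModel[Union[JobRun, JobRun1]]):
        pass
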
diff -pruN 0.26.4-3/tests/data/jsonschema/same_name_objects.json 0.45.0-1/tests/data/jsonschema/same_name_objects.json
--- 0.26.4-3/tests/data/jsonschema/same_name_objects.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/same_name_objects.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+{
+  "$id": "https://example.com/same_name_objects.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "friends": {
+      "type": "object",
+      "additionalProperties": false
+    },
+    "tst1": {
+        "$ref": "person.json#/properties/friends"
+    },
+    "tst2": {
+        "$ref": "person.json#/properties/friends"
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/self_reference.json 0.45.0-1/tests/data/jsonschema/self_reference.json
--- 0.26.4-3/tests/data/jsonschema/self_reference.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/self_reference.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,23 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "Pet": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string",
+          "examples": ["dog", "cat"]
+        },
+        "friends": {
+          "type": "array",
+          "minItems": 1,
+          "items": [
+            {
+              "$ref": "#/definitions/Pet"
+            }
+          ]
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
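
`Pet.friends` points back at `Pet` itself, so the generated model needs a forward reference. A minimal sketch of the recursive shape being exercised:

    from typing import List, Optional
    from pydantic import BaseModel


    class Pet(BaseModel):
        name: Optional[str] = None
        # Self-referential: pydantic resolves the string annotation lazily.
        friends: Optional[List["Pet"]] = None
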
diff -pruN 0.26.4-3/tests/data/jsonschema/similar_nested_array.json 0.45.0-1/tests/data/jsonschema/similar_nested_array.json
--- 0.26.4-3/tests/data/jsonschema/similar_nested_array.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/similar_nested_array.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,93 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "ObjectA": {
+      "properties": {
+        "data": {
+          "items": {
+            "properties": {
+              "keyA": {
+                "type": "string"
+              }
+            },
+            "type": "object"
+          },
+          "type": "array"
+        }
+      },
+      "type": "object"
+    },
+    "ObjectB": {
+      "properties": {
+        "data": {
+          "items": {
+            "properties": {
+              "keyB": {
+                "type": "string"
+              }
+            },
+            "type": "object"
+          },
+          "type": "array"
+        }
+      },
+      "type": "object"
+    },
+    "ObjectC": {
+      "properties": {
+        "keyC": {
+          "anyOf": [
+            {
+              "type": "object",
+              "properties": {
+                "nestedA": {
+                  "type": "string"
+                }
+              }
+            },
+            {
+              "type": "object",
+              "properties": {
+                "nestedB": {
+                  "type": "string"
+                }
+              }
+            }
+          ]
+        }
+      }
+    },
+    "ObjectD": {
+      "properties": {
+        "keyC": {
+          "items": [
+            {
+              "type": "object",
+              "properties": {
+                "nestedA": {
+                  "type": "string"
+                }
+              }
+            },
+            {
+              "type": "object",
+              "properties": {
+                "nestedB": {
+                  "type": "string"
+                }
+              }
+            },
+            {
+              "type": "string",
+              "enum": ["dog", "cat", "snake"]
+            },
+            {
+              "type": "string",
+              "enum": ["orange", "apple", "milk"]
+            }
+          ]
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/simple_frozen_test.json 0.45.0-1/tests/data/jsonschema/simple_frozen_test.json
--- 0.26.4-3/tests/data/jsonschema/simple_frozen_test.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/simple_frozen_test.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,18 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "title": "User",
+  "properties": {
+    "name": {
+      "type": "string"
+    },
+    "age": {
+      "type": "integer"
+    },
+    "email": {
+      "type": "string",
+      "format": "email"
+    }
+  },
+  "required": ["name", "age"]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/simple_string.json 0.45.0-1/tests/data/jsonschema/simple_string.json
--- 0.26.4-3/tests/data/jsonschema/simple_string.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/simple_string.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,6 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema",
+  "type": "object",
+  "properties": {"s": {"type": ["string"]}},
+  "required": ["s"]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/skip_root_model_test.json 0.45.0-1/tests/data/jsonschema/skip_root_model_test.json
--- 0.26.4-3/tests/data/jsonschema/skip_root_model_test.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/skip_root_model_test.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "_Placeholder",
+  "type": "null",
+  "$defs": {
+    "Person": {
+      "type": "object",
+      "properties": {
+        "name": {"type": "string"},
+        "age": {"type": "integer"}
+      },
+      "required": ["name"]
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/space_field_enum.json 0.45.0-1/tests/data/jsonschema/space_field_enum.json
--- 0.26.4-3/tests/data/jsonschema/space_field_enum.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/space_field_enum.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "properties": {
+    "SpaceIF": {
+      "$ref": "#/definitions/SpaceIF"
+    }
+  },
+  "definitions": {
+    "SpaceIF": {
+      "type": "string",
+      "enum": [
+        "Space Field"
+      ]
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/special_enum.json 0.45.0-1/tests/data/jsonschema/special_enum.json
--- 0.26.4-3/tests/data/jsonschema/special_enum.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/special_enum.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "string",
+  "enum": [
+    true,
+    false,
+    "",
+    "\n",
+    "\r\n",
+    "\t",
+    "\b",
+    null,
+    "\\"
+  ]
+}
\ No newline at end of file
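
Values like `true`, `null`, the empty string, and bare whitespace cannot serve as Python identifiers, so the emitter has to synthesize member names while keeping the values intact. A sketch of the kind of enum this implies (the member names here are assumptions, not the generator's actual choices):

    from enum import Enum


    class SpecialEnum(Enum):
        # Only the names are synthesized; the values survive unchanged.
        boolean_true = True
        boolean_false = False
        field_ = ""
        field_newline = "\n"
        field_crlf = "\r\n"
        field_tab = "\t"
        field_backspace = "\b"
        none = None
        backslash = "\\"
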
diff -pruN 0.26.4-3/tests/data/jsonschema/special_field_name.json 0.45.0-1/tests/data/jsonschema/special_field_name.json
--- 0.26.4-3/tests/data/jsonschema/special_field_name.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/special_field_name.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "SpecialField",
+  "type": "object",
+  "properties": {
+    "global": {
+      "type": "string"
+    },
+    "with": {
+      "type": "string"
+    },
+    "class": {
+      "type": "integer"
+    },
+    "class's": {
+      "type": "integer"
+    },
+    "class-s": {
+      "type": "string"
+    },
+    "#": {
+      "type": "string"
+    }
+  }
+}
\ No newline at end of file
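
`global`, `with`, and `class` are Python keywords, and `class's`, `class-s`, and `#` are not identifiers at all, so every field needs a sanitized attribute name plus an alias back to the original key. Roughly, with illustrative sanitized names:

    from typing import Optional
    from pydantic import BaseModel, Field


    class SpecialField(BaseModel):
        global_: Optional[str] = Field(None, alias="global")
        with_: Optional[str] = Field(None, alias="with")
        class_: Optional[int] = Field(None, alias="class")
        class_s: Optional[int] = Field(None, alias="class's")
        class_s_1: Optional[str] = Field(None, alias="class-s")
        hash: Optional[str] = Field(None, alias="#")
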
diff -pruN 0.26.4-3/tests/data/jsonschema/special_field_name_with_inheritance_model.json 0.45.0-1/tests/data/jsonschema/special_field_name_with_inheritance_model.json
--- 0.26.4-3/tests/data/jsonschema/special_field_name_with_inheritance_model.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/special_field_name_with_inheritance_model.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,51 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "SpecialField",
+  "type": "object",
+  "properties": {
+    "global": {
+      "type": "string"
+    },
+    "with": {
+      "type": "string"
+    },
+    "class": {
+      "type": "integer"
+    },
+    "class's": {
+      "type": "integer"
+    },
+    "class-s": {
+      "type": "string"
+    },
+    "#": {
+      "type": "string"
+    }
+  },
+  "allOf": [
+    {
+      "$ref": "#/definitions/base"
+    }
+  ],
+  "definitions": {
+    "base": {
+      "allOf": [
+        {
+          "$ref": "#/definitions/nestedBase"
+        }
+      ],
+      "properties": {
+        "name": {
+          "type": "string"
+        }
+      }
+    },
+    "nestedBase": {
+      "properties": {
+        "age": {
+          "type": "string"
+        }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/special_prefix_model.json 0.45.0-1/tests/data/jsonschema/special_prefix_model.json
--- 0.26.4-3/tests/data/jsonschema/special_prefix_model.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/special_prefix_model.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+{
+    "$id": "schema_v2.json",
+    "$schema": "http://json-schema.org/schema#",
+
+    "type": "object",
+    "properties": {
+        "@id": {
+            "type": "string",
+            "format": "uri",
+            "pattern": "^http.*$",
+            "title": "Id must be presesnt and must be a URI"
+        },
+        "@type": { "type": "string" },
+        "@+!type": { "type": "string" },
+        "@-!type": { "type": "string" },
+        "profile": { "type": "string" }
+    },
+    "required": ["@id", "@type"]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/stdin_oneof_ref.json 0.45.0-1/tests/data/jsonschema/stdin_oneof_ref.json
--- 0.26.4-3/tests/data/jsonschema/stdin_oneof_ref.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/stdin_oneof_ref.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+{
+  "$schema": "https://json-schema.org/draft/2019-09/schema",
+  "type": "object",
+  "oneOf": [
+    { "$ref": "#/definitions/test" }
+  ],
+  "definitions": {
+    "test": {
+      "type": "object",
+      "properties": {
+        "name": { "type": "string" }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/strict_types.json 0.45.0-1/tests/data/jsonschema/strict_types.json
--- 0.26.4-3/tests/data/jsonschema/strict_types.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/strict_types.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,50 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "User",
+  "type": "object",
+  "properties": {
+        "name": {
+          "type": "string",
+          "example": "ken"
+        },
+        "age": {
+          "type": "integer"
+        },
+        "salary": {
+          "type": "integer",
+          "minimum": 0
+        },
+        "debt" : {
+          "type": "integer",
+          "maximum": 0
+        },
+        "loan" : {
+          "type": "number",
+          "maximum": 0
+        },
+        "tel": {
+          "type": "string",
+          "pattern": "^(\\([0-9]{3}\\))?[0-9]{3}-[0-9]{4}$"
+        },
+        "height": {
+          "type": "number",
+          "minimum": 0
+        },
+        "weight": {
+          "type": "number",
+          "minimum": 0
+        },
+        "score": {
+          "type": "number",
+          "minimum": 1e-08
+        },
+        "active": {
+          "type": "boolean"
+        },
+        "photo": {
+          "type": "string",
+          "format": "binary",
+          "minLength": 100
+        }
+  }
+}
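
The fixture mixes numeric bounds, a regex `pattern`, and a `minLength` on binary content. One plausible pydantic v2 rendering of a few of these constraints, as a sketch:

    from typing import Optional
    from pydantic import BaseModel, Field


    class User(BaseModel):
        name: Optional[str] = None
        salary: Optional[int] = Field(None, ge=0)
        debt: Optional[int] = Field(None, le=0)
        tel: Optional[str] = Field(
            None, pattern=r"^(\([0-9]{3}\))?[0-9]{3}-[0-9]{4}$"
        )
        score: Optional[float] = Field(None, ge=1e-08)
        photo: Optional[bytes] = Field(None, min_length=100)
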
diff -pruN 0.26.4-3/tests/data/jsonschema/string_dict.json 0.45.0-1/tests/data/jsonschema/string_dict.json
--- 0.26.4-3/tests/data/jsonschema/string_dict.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/string_dict.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+{
+  "$schema": "https://json-schema.org/draft/2020-12/schema",
+  "type": "object",
+  "propertyNames": {
+    "type": "string"
+  },
+  "additionalProperties": {
+    "type": "string"
+  },
+  "title": "MyStringDict"
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/string_enum.json 0.45.0-1/tests/data/jsonschema/string_enum.json
--- 0.26.4-3/tests/data/jsonschema/string_enum.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/string_enum.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1 @@
+{"type": "string", "enum": ["A", "B"]}
diff -pruN 0.26.4-3/tests/data/jsonschema/subclass_enum.json 0.45.0-1/tests/data/jsonschema/subclass_enum.json
--- 0.26.4-3/tests/data/jsonschema/subclass_enum.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/subclass_enum.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,51 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "properties": {
+    "IntEnum": {
+      "type": "integer",
+      "enum": [
+        1,
+        2,
+        3
+      ]
+    },
+    "FloatEnum": {
+      "type": "number",
+      "enum": [
+        1.1,
+        2.1,
+        3.1
+      ]
+    },
+    "StrEnum": {
+      "type": "string",
+      "enum": [
+        "1",
+        "2",
+        "3"
+      ]
+    },
+    "NonTypedEnum": {
+      "enum": [
+        "1",
+        "2",
+        "3"
+      ]
+    },
+    "BooleanEnum": {
+      "type": "boolean",
+      "enum": [
+        true,
+        false
+      ]
+    },
+    "UnknownEnum": {
+      "type": "unknown",
+      "enum": [
+        "a",
+        "b"
+      ]
+    }
+  }
+}
\ No newline at end of file
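
When an enum declares a `type`, members can subclass the matching Python type so they compare naturally against raw values; the untyped and bogus-`unknown` cases must fall back to a plain `Enum`. A sketch (member names are illustrative):

    from enum import Enum


    class IntEnum(int, Enum):
        integer_1 = 1
        integer_2 = 2
        integer_3 = 3


    class StrEnum(str, Enum):
        field_1 = "1"
        field_2 = "2"
        field_3 = "3"


    assert IntEnum.integer_1 == 1    # int subclass compares with raw ints
    assert StrEnum.field_1 == "1"    # str subclass compares with raw strings
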
diff -pruN 0.26.4-3/tests/data/jsonschema/title_with_dots.json 0.45.0-1/tests/data/jsonschema/title_with_dots.json
--- 0.26.4-3/tests/data/jsonschema/title_with_dots.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/title_with_dots.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "GenomeStudio 2.0 - Methylation Module",
+  "type": "object",
+  "properties": {
+    "version": {
+      "type": "string"
+    },
+    "nested": {
+      "title": "Nested 1.5",
+      "type": "object",
+      "properties": {
+        "value": {
+          "type": "integer"
+        }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/titles.json 0.45.0-1/tests/data/jsonschema/titles.json
--- 0.26.4-3/tests/data/jsonschema/titles.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/titles.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,91 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "ProcessingStatus": {
+      "title": "Processing Status Title",
+      "enum": [
+        "COMPLETED",
+        "PENDING",
+        "FAILED"
+      ],
+      "type": "string",
+      "description": "The processing status"
+    },
+    "kind": {
+      "type": "string"
+    },
+    "ExtendedProcessingTask": {
+      "title": "Extended Processing Task Title",
+      "oneOf": [
+        {
+          "$ref": "#"
+        },
+        {
+          "type": "object",
+          "title": "NestedCommentTitle",
+          "properties": {
+            "comment": {
+              "type": "string"
+            }
+          }
+        }
+      ]
+    },
+    "ExtendedProcessingTasks": {
+      "title": "Extended Processing Tasks Title",
+      "type": "array",
+      "items": [
+        {
+          "$ref": "#/definitions/ExtendedProcessingTask"
+        }
+      ]
+    },
+    "ProcessingTask": {
+      "title": "Processing Task Title",
+      "type": "object",
+      "properties": {
+        "processing_status_union": {
+          "title": "Processing Status Union Title",
+          "oneOf": [
+            {
+              "title": "Processing Status Detail",
+              "type": "object",
+              "properties": {
+                "id": {
+                  "type": "integer"
+                },
+                "description": {
+                  "type": "string"
+                }
+              }
+            },
+            {
+              "$ref": "#/definitions/ExtendedProcessingTask"
+            },
+            {
+              "$ref": "#/definitions/ProcessingStatus"
+            }
+          ],
+          "default": "COMPLETED"
+        },
+        "processing_status": {
+          "$ref": "#/definitions/ProcessingStatus",
+          "default": "COMPLETED"
+        },
+        "name": {
+          "type": "string"
+        },
+        "kind": {
+          "$ref": "#/definitions/kind"
+        }
+      }
+    }
+  },
+  "title": "Processing Tasks Title",
+  "type": "array",
+      "items": [
+        {
+          "$ref": "#/definitions/ProcessingTask"
+        }
+      ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/treat_dot_as_module/complex.directory/api.path.basic_enum.json 0.45.0-1/tests/data/jsonschema/treat_dot_as_module/complex.directory/api.path.basic_enum.json
--- 0.26.4-3/tests/data/jsonschema/treat_dot_as_module/complex.directory/api.path.basic_enum.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/treat_dot_as_module/complex.directory/api.path.basic_enum.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+{
+  "definitions": {
+    "ProcessingStatus": {
+      "title": "ProcessingStatus",
+      "enum": [
+        "COMPLETED",
+        "PENDING",
+        "FAILED"
+      ],
+      "type": "string",
+      "description": "The processing status"
+    }
+  },
+  "properties": {
+    "processing_status": {
+      "$ref": "#/definitions/ProcessingStatus",
+      "default": "COMPLETED"
+    },
+    "name": {
+      "type": "string"
+    }
+  },
+  "title": "Basic Enum",
+  "type": "object"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/treat_dot_as_module/complex.directory/api.path.input.json 0.45.0-1/tests/data/jsonschema/treat_dot_as_module/complex.directory/api.path.input.json
--- 0.26.4-3/tests/data/jsonschema/treat_dot_as_module/complex.directory/api.path.input.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/treat_dot_as_module/complex.directory/api.path.input.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,13 @@
+{
+  "properties": {
+    "input": {
+      "default": "input",
+      "title": "Input"
+    },
+    "extType": {
+      "$ref": "schema.json"
+    }
+  },
+  "title": "Input",
+  "type": "object"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/treat_dot_as_module/complex.directory/api.path.output.json 0.45.0-1/tests/data/jsonschema/treat_dot_as_module/complex.directory/api.path.output.json
--- 0.26.4-3/tests/data/jsonschema/treat_dot_as_module/complex.directory/api.path.output.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/treat_dot_as_module/complex.directory/api.path.output.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+{
+  "properties": {
+    "output": {
+      "default": "output",
+      "title": "Output"
+    }
+  },
+  "title": "Output",
+  "type": "object"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/treat_dot_as_module/complex.directory/schema.json 0.45.0-1/tests/data/jsonschema/treat_dot_as_module/complex.directory/schema.json
--- 0.26.4-3/tests/data/jsonschema/treat_dot_as_module/complex.directory/schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/treat_dot_as_module/complex.directory/schema.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+{
+  "properties": {
+    "ExtType": {
+      "type": "a",
+      "title": "ExtType"
+    }
+  },
+  "title": "ExtType",
+  "type": "object"
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/treat_dot_as_module_single/model.schema.json 0.45.0-1/tests/data/jsonschema/treat_dot_as_module_single/model.schema.json
--- 0.26.4-3/tests/data/jsonschema/treat_dot_as_module_single/model.schema.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/treat_dot_as_module_single/model.schema.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "User",
+  "type": "object",
+  "properties": {
+    "name": {
+      "type": "string"
+    },
+    "age": {
+      "type": "integer"
+    }
+  },
+  "required": ["name"]
+}
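
These fixtures exist because a dotted name like `model.schema.json` (or `api.path.input.json` above) is ambiguous: it can be one flat module or a nested package path, and `--treat-dot-as-module` selects the latter. A programmatic sketch against the directory fixture; whether `generate()` exposes the flag as a keyword of this exact name is an assumption here:

    from pathlib import Path

    from datamodel_code_generator import DataModelType, InputFileType, generate

    generate(
        Path("tests/data/jsonschema/treat_dot_as_module/complex.directory"),
        input_file_type=InputFileType.JsonSchema,
        output=Path("generated"),
        output_model_type=DataModelType.PydanticV2BaseModel,
        treat_dot_as_module=True,  # assumed to mirror the CLI flag
    )
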
diff -pruN 0.26.4-3/tests/data/jsonschema/tutorial_pet.json 0.45.0-1/tests/data/jsonschema/tutorial_pet.json
--- 0.26.4-3/tests/data/jsonschema/tutorial_pet.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/tutorial_pet.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Pet",
+  "type": "object",
+  "required": ["name", "species"],
+  "properties": {
+    "name": {
+      "type": "string",
+      "description": "The pet's name"
+    },
+    "species": {
+      "type": "string",
+      "enum": ["dog", "cat", "bird", "fish"]
+    },
+    "age": {
+      "type": "integer",
+      "minimum": 0,
+      "description": "Age in years"
+    },
+    "vaccinated": {
+      "type": "boolean",
+      "default": false
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/type_alias.json 0.45.0-1/tests/data/jsonschema/type_alias.json
--- 0.26.4-3/tests/data/jsonschema/type_alias.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/type_alias.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,35 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "SimpleString": {
+      "type": "string"
+    },
+    "UnionType": {
+      "anyOf": [
+        {"type": "string"},
+        {"type": "integer"}
+      ]
+    },
+    "ArrayType": {
+      "type": "array",
+      "items": {"type": "string"}
+    },
+    "AnnotatedType": {
+      "title": "MyAnnotatedType",
+      "description": "An annotated union type",
+      "anyOf": [
+        {"type": "string"},
+        {"type": "boolean"}
+      ]
+    },
+    "ModelWithTypeAliasField": {
+      "type": "object",
+      "properties": {
+        "simple_field": {"$ref": "#/definitions/SimpleString"},
+        "union_field": {"$ref": "#/definitions/UnionType"},
+        "array_field": {"$ref": "#/definitions/ArrayType"},
+        "annotated_field": {"$ref": "#/definitions/AnnotatedType"}
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/type_alias_cycle.json 0.45.0-1/tests/data/jsonschema/type_alias_cycle.json
--- 0.26.4-3/tests/data/jsonschema/type_alias_cycle.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/type_alias_cycle.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,27 @@
+{
+  "$schema": "https://json-schema.org/draft/2020-12/schema",
+  "$defs": {
+    "A": {
+      "title": "A",
+      "type": "array",
+      "items": {
+        "$ref": "#/$defs/B"
+      }
+    },
+    "B": {
+      "title": "B",
+      "type": "array",
+      "items": {
+        "$ref": "#/$defs/A"
+      }
+    }
+  },
+  "anyOf": [
+    {
+      "$ref": "#/$defs/A"
+    },
+    {
+      "$ref": "#/$defs/B"
+    }
+  ]
+}
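
`A` and `B` are mutually recursive pure aliases, each an array of the other, which is exactly the case that forces forward references in alias output. In modern typing terms the cycle looks like this (a sketch, not the generator's verbatim output):

    # Python 3.12+ `type` aliases are evaluated lazily,
    # so the forward cycle is legal as written.
    type A = list[B]
    type B = list[A]
    type Model = A | B
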
diff -pruN 0.26.4-3/tests/data/jsonschema/type_alias_forward_ref.json 0.45.0-1/tests/data/jsonschema/type_alias_forward_ref.json
--- 0.26.4-3/tests/data/jsonschema/type_alias_forward_ref.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/type_alias_forward_ref.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,45 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "_Placeholder",
+  "type": "null",
+  "$defs": {
+    "TextPart": {
+      "properties": {
+        "type": {
+          "const": "text",
+          "type": "string"
+        },
+        "content": {
+          "type": "string"
+        }
+      },
+      "required": ["type", "content"],
+      "title": "TextPart",
+      "type": "object"
+    },
+    "BlobPart": {
+      "properties": {
+        "type": {
+          "const": "blob",
+          "type": "string"
+        },
+        "data": {
+          "type": "string"
+        }
+      },
+      "required": ["type", "data"],
+      "title": "BlobPart",
+      "type": "object"
+    },
+    "SystemInstructions": {
+      "type": "array",
+      "items": {
+        "anyOf": [
+          {"$ref": "#/$defs/TextPart"},
+          {"$ref": "#/$defs/BlobPart"}
+        ]
+      },
+      "title": "SystemInstructions"
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/type_alias_with_circular_ref_to_class.json 0.45.0-1/tests/data/jsonschema/type_alias_with_circular_ref_to_class.json
--- 0.26.4-3/tests/data/jsonschema/type_alias_with_circular_ref_to_class.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/type_alias_with_circular_ref_to_class.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,32 @@
+{
+  "$schema": "https://json-schema.org/draft/2020-12/schema",
+  "$defs": {
+    "SpanB": {
+      "type": "object",
+      "properties": {
+        "recur": { "type": "array", "items": [{ "$ref": "#/$defs/Span" }] }
+      },
+      "required": ["recur"]
+    },
+    "Either": { "oneOf": [{ "$ref": "#/$defs/SpanB" }, { "$ref": "#/$defs/Span" }] },
+    "Span": {
+      "type": "object",
+      "properties": {
+        "recur": {
+          "type": "array",
+          "items": [{ "$ref": "#/$defs/Either" }]
+        }
+      },
+      "required": ["recur"]
+    }
+  },
+  "title": "Defaults",
+  "type": "object",
+  "properties": {
+    "a": {
+      "type": "array",
+      "items": [{ "$ref": "#/$defs/Span" }]
+    }
+  },
+  "required": ["a"]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/type_mappings.json 0.45.0-1/tests/data/jsonschema/type_mappings.json
--- 0.26.4-3/tests/data/jsonschema/type_mappings.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/type_mappings.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,22 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "BlobModel",
+  "type": "object",
+  "properties": {
+    "content": {
+      "type": "string",
+      "format": "binary",
+      "description": "Binary content that should be mapped to string"
+    },
+    "data": {
+      "type": "string",
+      "format": "byte",
+      "description": "Base64 encoded data"
+    },
+    "name": {
+      "type": "string",
+      "description": "Regular string field"
+    }
+  },
+  "required": ["content", "data", "name"]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/use_attribute_docstrings_test.json 0.45.0-1/tests/data/jsonschema/use_attribute_docstrings_test.json
--- 0.26.4-3/tests/data/jsonschema/use_attribute_docstrings_test.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/use_attribute_docstrings_test.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "title": "Person",
+  "properties": {
+    "name": {
+      "type": "string",
+      "description": "The person's full name"
+    },
+    "age": {
+      "type": "integer",
+      "description": "The person's age in years"
+    }
+  },
+  "required": ["name"]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/use_decimal_for_multiple_of.json 0.45.0-1/tests/data/jsonschema/use_decimal_for_multiple_of.json
--- 0.26.4-3/tests/data/jsonschema/use_decimal_for_multiple_of.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/use_decimal_for_multiple_of.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,26 @@
+{
+  "type": "object",
+  "properties": {
+    "price": {
+      "type": "number",
+      "multipleOf": 0.01,
+      "minimum": 0,
+      "maximum": 99999.99
+    },
+    "quantity": {
+      "type": "number",
+      "multipleOf": 0.1
+    },
+    "rate": {
+      "type": "number",
+      "multipleOf": 0.001,
+      "exclusiveMinimum": 0,
+      "exclusiveMaximum": 1
+    },
+    "simple_float": {
+      "type": "number",
+      "minimum": 0,
+      "maximum": 100
+    }
+  }
+}
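
Encoding `multipleOf: 0.01` as a float invites rounding surprises (in binary floating point, values such as 0.07 are not exact multiples of 0.01), which is presumably what a Decimal-based mapping avoids. A pydantic v2 sketch of the `price` constraint:

    from decimal import Decimal
    from typing import Optional
    from pydantic import BaseModel, Field


    class Model(BaseModel):
        # Decimal keeps multiple_of exact where float arithmetic would misfire.
        price: Optional[Decimal] = Field(
            None, multiple_of=Decimal("0.01"), ge=0, le=Decimal("99999.99")
        )
        simple_float: Optional[float] = Field(None, ge=0, le=100)
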
diff -pruN 0.26.4-3/tests/data/jsonschema/use_default_with_const.json 0.45.0-1/tests/data/jsonschema/use_default_with_const.json
--- 0.26.4-3/tests/data/jsonschema/use_default_with_const.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/use_default_with_const.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+{
+    "$schema": "http://json-schema.org/draft-07/schema#",
+    "type": "object",
+    "title": "Use default with const",
+    "properties": {
+        "foo": {
+            "const": "foo"
+        }
+    }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/use_frozen_field.json 0.45.0-1/tests/data/jsonschema/use_frozen_field.json
--- 0.26.4-3/tests/data/jsonschema/use_frozen_field.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/use_frozen_field.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,26 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "User",
+  "type": "object",
+  "required": ["id", "name", "password"],
+  "properties": {
+    "id": {
+      "type": "integer",
+      "description": "Server-generated ID",
+      "readOnly": true
+    },
+    "name": {
+      "type": "string"
+    },
+    "password": {
+      "type": "string",
+      "description": "User password",
+      "writeOnly": true
+    },
+    "created_at": {
+      "type": "string",
+      "format": "date-time",
+      "readOnly": true
+    }
+  }
+}
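
`readOnly: true` maps naturally onto per-field immutability, which pydantic v2 spells `Field(frozen=True)`: the value is set at construction and assignment afterwards raises. A sketch of the intended shape:

    from datetime import datetime
    from typing import Optional
    from pydantic import BaseModel, Field


    class User(BaseModel):
        id: int = Field(frozen=True, description="Server-generated ID")
        name: str
        password: str = Field(description="User password")
        created_at: Optional[datetime] = Field(None, frozen=True)
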
diff -pruN 0.26.4-3/tests/data/jsonschema/use_non_positive_negative.json 0.45.0-1/tests/data/jsonschema/use_non_positive_negative.json
--- 0.26.4-3/tests/data/jsonschema/use_non_positive_negative.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/use_non_positive_negative.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,27 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "NumberConstraints",
+  "type": "object",
+  "properties": {
+    "non_negative_count": {
+      "type": "integer",
+      "minimum": 0,
+      "description": "A count that cannot be negative"
+    },
+    "non_positive_balance": {
+      "type": "integer",
+      "maximum": 0,
+      "description": "A balance that cannot be positive"
+    },
+    "non_negative_amount": {
+      "type": "number",
+      "minimum": 0,
+      "description": "An amount that cannot be negative"
+    },
+    "non_positive_score": {
+      "type": "number",
+      "maximum": 0,
+      "description": "A score that cannot be positive"
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/use_pendulum.json 0.45.0-1/tests/data/jsonschema/use_pendulum.json
--- 0.26.4-3/tests/data/jsonschema/use_pendulum.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/use_pendulum.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,23 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Event",
+  "type": "object",
+  "properties": {
+    "name": {
+      "type": "string"
+    },
+    "created_at": {
+      "type": "string",
+      "format": "date-time"
+    },
+    "event_date": {
+      "type": "string",
+      "format": "date"
+    },
+    "duration": {
+      "type": "string",
+      "format": "duration"
+    }
+  },
+  "required": ["name", "created_at"]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/user.json 0.45.0-1/tests/data/jsonschema/user.json
--- 0.26.4-3/tests/data/jsonschema/user.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/user.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,33 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "User": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string",
+          "example": "ken"
+        },
+        "pets": {
+          "type": "array",
+          "items": {
+            "$ref": "#/definitions/User"
+          },
+          "default_factory": "list"
+        }
+      }
+    },
+    "Pet": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string",
+          "examples": [
+            "dog",
+            "cat"
+          ]
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/jsonschema/user_default.json 0.45.0-1/tests/data/jsonschema/user_default.json
--- 0.26.4-3/tests/data/jsonschema/user_default.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/user_default.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,34 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "User": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string",
+          "example": "ken"
+        },
+        "pets": {
+          "type": "array",
+          "items": {
+            "$ref": "#/definitions/User"
+          },
+          "default": ["dog", "cat"]
+        }
+      }
+    },
+    "Pet": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string",
+          "examples": [
+            "dog",
+            "cat"
+          ],
+          "default": "dog"
+        }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/user_defs.json 0.45.0-1/tests/data/jsonschema/user_defs.json
--- 0.26.4-3/tests/data/jsonschema/user_defs.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/user_defs.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,33 @@
+{
+  "$schema": "https://json-schema.org/draft/2019-09/schema",
+  "$defs": {
+    "User": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string",
+          "example": "ken"
+        },
+        "pets": {
+          "type": "array",
+          "items": {
+            "$ref": "#/$defs/User"
+          },
+          "default_factory": "list"
+        }
+      }
+    },
+    "Pet": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string",
+          "examples": [
+            "dog",
+            "cat"
+          ]
+        }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/with_anchor.json 0.45.0-1/tests/data/jsonschema/with_anchor.json
--- 0.26.4-3/tests/data/jsonschema/with_anchor.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/with_anchor.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,18 @@
+{
+  "$id": "http://example.com/root.json",
+  "type": "object",
+  "definitions": {
+    "Address": {
+      "$id": "#address",
+      "type": "object",
+      "properties": {
+        "street": { "type": "string" }
+      },
+      "required": ["street"]
+    }
+  },
+  "properties": {
+    "billing_address": { "$ref": "#address" }
+  },
+  "required": ["billing_address"]
+}
diff -pruN 0.26.4-3/tests/data/jsonschema/without_titles.json 0.45.0-1/tests/data/jsonschema/without_titles.json
--- 0.26.4-3/tests/data/jsonschema/without_titles.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/jsonschema/without_titles.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,83 @@
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "ProcessingStatus": {
+      "enum": [
+        "COMPLETED",
+        "PENDING",
+        "FAILED"
+      ],
+      "type": "string",
+      "description": "The processing status"
+    },
+    "kind": {
+      "type": "string"
+    },
+    "ExtendedProcessingTask": {
+      "oneOf": [
+        {
+          "$ref": "#"
+        },
+        {
+          "type": "object",
+          "properties": {
+            "comment": {
+              "type": "string"
+            }
+          }
+        }
+      ]
+    },
+    "ExtendedProcessingTasks": {
+      "type": "array",
+      "items": [
+        {
+          "$ref": "#/definitions/ExtendedProcessingTask"
+        }
+      ]
+    },
+    "ProcessingTask": {
+      "type": "object",
+      "properties": {
+        "processing_status_union": {
+          "oneOf": [
+            {
+              "type": "object",
+              "properties": {
+                "id": {
+                  "type": "integer"
+                },
+                "description": {
+                  "type": "string"
+                }
+              }
+            },
+            {
+              "$ref": "#/definitions/ExtendedProcessingTask"
+            },
+            {
+              "$ref": "#/definitions/ProcessingStatus"
+            }
+          ],
+          "default": "COMPLETED"
+        },
+        "processing_status": {
+          "$ref": "#/definitions/ProcessingStatus",
+          "default": "COMPLETED"
+        },
+        "name": {
+          "type": "string"
+        },
+        "kind": {
+          "$ref": "#/definitions/kind"
+        }
+      }
+    }
+  },
+  "type": "array",
+      "items": [
+        {
+          "$ref": "#/definitions/ProcessingTask"
+        }
+      ]
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/additional_properties.yaml 0.45.0-1/tests/data/openapi/additional_properties.yaml
--- 0.26.4-3/tests/data/openapi/additional_properties.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/additional_properties.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,203 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+    post:
+      summary: Create a pet
+      operationId: createPets
+      tags:
+        - pets
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+  /pets/{petId}:
+    get:
+      summary: Info for a specific pet
+      operationId: showPetById
+      tags:
+        - pets
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Expected response to a valid request
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+components:
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    Pets:
+      type: array
+      items:
+        $ref: "#/components/schemas/Pet"
+    Users:
+      type: array
+      items:
+        required:
+          - id
+          - name
+        properties:
+          id:
+            type: integer
+            format: int64
+          name:
+            type: string
+          tag:
+            type: string
+        additionalProperties: true
+    Id:
+      type: string
+    Rules:
+      type: array
+      items:
+        type: string
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+      additionalProperties: false
+    Event:
+      type: object
+      properties:
+        name:
+          type: string
+    Result:
+        type: object
+        properties:
+          event:
+            $ref: '#/components/schemas/Event'
+    test:
+      type: object
+      properties:
+        broken:
+          type: object
+          additionalProperties:
+            $ref: '#/components/schemas/broken'
+        failing:
+          type: object
+          additionalProperties:
+            type: string
+          default: { }
+    broken:
+      type: object
+      properties:
+        foo:
+          type: string
+        bar:
+          type: integer
+    brokenArray:
+      type: object
+      properties:
+        broken:
+          type: object
+          additionalProperties:
+            type: array
+            items:
+              $ref: '#/components/schemas/broken'
+    FileSetUpload:
+      title: FileSetUpload
+      required:
+        - tags
+      type: object
+      properties:
+        task_id:
+            title: 'task id'
+            type: string
+        tags:
+            title: 'Dict of tags, each containing a list of file names'
+            type: object
+            additionalProperties:
+              type: array
+              items:
+                type: string
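
`FileSetUpload.tags` shows the common OpenAPI pattern of a string-to-array map via `additionalProperties`. Driving the whole spec through the library's programmatic entry point looks roughly like this (paths are illustrative):

    from pathlib import Path

    from datamodel_code_generator import DataModelType, InputFileType, generate

    generate(
        Path("tests/data/openapi/additional_properties.yaml"),
        input_file_type=InputFileType.OpenAPI,
        output=Path("models.py"),
        output_model_type=DataModelType.PydanticV2BaseModel,
    )
    # tags would plausibly come out as Dict[str, List[str]].
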
diff -pruN 0.26.4-3/tests/data/openapi/alias.yaml 0.45.0-1/tests/data/openapi/alias.yaml
--- 0.26.4-3/tests/data/openapi/alias.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/alias.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,312 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pet"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+    post:
+      summary: Create a pet
+      operationId: createPets
+      tags:
+        - pets
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+  /pets/{petId}:
+    get:
+      summary: Info for a specific pet
+      operationId: showPetById
+      tags:
+        - pets
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Expected response to a valid request
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pet"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+components:
+  schemas:
+    Pet:
+      enum:
+        - ca-t
+        - dog*
+      type: string
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    user-name:
+      properties:
+        first-name:
+          type: string
+        home-address:
+          $ref: "#/components/schemas/home-address"
+    home-address:
+      properties:
+        address-1:
+          type: string
+    team-members:
+      items:
+        type: string
+    all-of-ref:
+      allOf:
+        - $ref: "#/components/schemas/user-name"
+        - $ref: "#/components/schemas/home-address"
+    all-of-obj:
+      allOf:
+        - type: object
+          properties:
+            name:
+              type: string
+        - type: object
+          properties:
+            number:
+              type: string
+    all-of-combine:
+      allOf:
+        - $ref: "#/components/schemas/user-name"
+        - type: object
+          properties:
+            birth-date:
+              type: string
+              format: date
+            size:
+              type: integer
+              minimum: 1
+    any-of-combine:
+      allOf:
+        - $ref: "#/components/schemas/home-address"
+        - $ref: "#/components/schemas/user-name"
+        - type: object
+          properties:
+            age:
+              type: string
+    any-of-combine-in-object:
+      type: object
+      properties:
+        item:
+          allOf:
+            - $ref: "#/components/schemas/home-address"
+            - $ref: "#/components/schemas/user-name"
+            - type: object
+              properties:
+                age:
+                  type: string
+    any-of-combine-in-array:
+      type: array
+      items:
+        allOf:
+          - $ref: "#/components/schemas/home-address"
+          - $ref: "#/components/schemas/user-name"
+          - type: object
+            properties:
+              age:
+                type: string
+    any-of-combine-in-root:
+      allOf:
+        - $ref: "#/components/schemas/home-address"
+        - $ref: "#/components/schemas/user-name"
+        - type: object
+          properties:
+            age:
+              type: string
+            birth-date:
+              type: string
+              format: date-time
+    model-s.Specie-s:
+      type: string
+      enum:
+        - dog
+        - cat
+        - snake
+    model-s.Pe-t:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+        species:
+          $ref: '#/components/schemas/model-s.Specie-s'
+    model-s.Use-r:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    collection-s.Pet-s:
+      type: array
+      items:
+        $ref: "#/components/schemas/model-s.Pe-t"
+    collection-s.User-s:
+      type: array
+      items:
+        $ref: "#/components/schemas/model-s.Use-r"
+    Id:
+      type: string
+    collection-s.Rule-s:
+      type: array
+      items:
+        type: string
+    collection-s.api-s:
+      type: array
+      items:
+        type: object
+        properties:
+          apiKey:
+            type: string
+            description: To be used as a dataset parameter value
+          apiVersionNumber:
+            type: string
+            description: To be used as a version parameter value
+          apiUrl:
+            type: string
+            format: uri
+            description: "The URL describing the dataset's fields"
+          apiDocumentationUrl:
+            type: string
+            format: uri
+            description: A URL to the API console for each API
+    model-s.Even-t:
+      type: object
+      properties:
+        name:
+          anyOf:
+            - type: string
+            - type: number
+            - type: integer
+            - type: boolean
+            - type: object
+            - type: array
+              items:
+                type: string
+    Result:
+      type: object
+      properties:
+        event:
+          $ref: '#/components/schemas/model-s.Even-t'
+    fo-o.ba-r.Thin-g:
+      properties:
+        attribute-s:
+          type: object
+    fo-o.ba-r.Than-g:
+      properties:
+        attributes:
+          type: array
+          items:
+            type: object
+    fo-o.ba-r.Clon-e:
+      allOf:
+        - $ref: '#/components/schemas/fo-o.ba-r.Thin-g'
+    fo-o.Te-a:
+      properties:
+        flavour-name:
+          type: string
+        id:
+          $ref: '#/components/schemas/Id'
+    Source:
+      properties:
+        country-name:
+          type: string
+    fo-o.Coco-a:
+      properties:
+        quality:
+          type: integer
+    wo-o.bo-o.Chocolat-e:
+      properties:
+        flavour-name:
+          type: string
+        sourc-e:
+          $ref: '#/components/schemas/Source'
+        coco-a:
+          $ref: '#/components/schemas/fo-o.Coco-a'
diff -pruN 0.26.4-3/tests/data/openapi/aliases.json 0.45.0-1/tests/data/openapi/aliases.json
--- 0.26.4-3/tests/data/openapi/aliases.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/aliases.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,4 @@
+{
+  "name": "name_",
+  "id": "id_"
+}
diff -pruN 0.26.4-3/tests/data/openapi/all_exports_local_collision.yaml 0.45.0-1/tests/data/openapi/all_exports_local_collision.yaml
--- 0.26.4-3/tests/data/openapi/all_exports_local_collision.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/all_exports_local_collision.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Local Collision Test
+paths: {}
+components:
+  schemas:
+    Message:
+      description: Local model in __init__.py
+      type: object
+      properties:
+        id:
+          type: string
+    child.Message:
+      description: Child model that collides with local Message
+      type: object
+      properties:
+        content:
+          type: string
diff -pruN 0.26.4-3/tests/data/openapi/all_exports_no_child.yaml 0.45.0-1/tests/data/openapi/all_exports_no_child.yaml
--- 0.26.4-3/tests/data/openapi/all_exports_no_child.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/all_exports_no_child.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: No Child Test
+paths: {}
+components:
+  schemas:
+    parent.ParentModel:
+      description: Model in parent __init__.py
+      type: object
+      properties:
+        id:
+          type: string
+    parent.child.deep.DeepModel:
+      description: Model in deep nested module
+      type: object
+      properties:
+        name:
+          type: string
diff -pruN 0.26.4-3/tests/data/openapi/all_exports_prefix_collision.yaml 0.45.0-1/tests/data/openapi/all_exports_prefix_collision.yaml
--- 0.26.4-3/tests/data/openapi/all_exports_prefix_collision.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/all_exports_prefix_collision.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,22 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Prefix Collision Test
+paths: {}
+components:
+  schemas:
+    InputMessage:
+      type: object
+      properties:
+        id:
+          type: string
+    input.Message:
+      type: object
+      properties:
+        content:
+          type: string
+    output.Message:
+      type: object
+      properties:
+        result:
+          type: string
diff -pruN 0.26.4-3/tests/data/openapi/all_exports_with_local_models.yaml 0.45.0-1/tests/data/openapi/all_exports_with_local_models.yaml
--- 0.26.4-3/tests/data/openapi/all_exports_with_local_models.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/all_exports_with_local_models.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: All Exports With Local Models
+paths: {}
+components:
+  schemas:
+    pkg.LocalModel:
+      description: Local model in pkg __init__.py
+      type: object
+      properties:
+        id:
+          type: string
+    pkg.AnotherLocal:
+      description: Another local model
+      type: object
+      properties:
+        name:
+          type: string
+    pkg.sub.ChildModel:
+      description: Model in child module
+      type: object
+      properties:
+        value:
+          type: integer
diff -pruN 0.26.4-3/tests/data/openapi/all_of_with_relative_ref/openapi.yaml 0.45.0-1/tests/data/openapi/all_of_with_relative_ref/openapi.yaml
--- 0.26.4-3/tests/data/openapi/all_of_with_relative_ref/openapi.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/all_of_with_relative_ref/openapi.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+openapi: 3.1.0
+paths: {}
+components:
+  schemas:
+    Animals:
+      $ref: ./schema/animal.yaml
+    Pets:
+      $ref: ./schema/pet.yaml
+servers:
+  - url: /api
+info:
+  title: Example
+  version: "1.0"
+  description: Example API
+tags:
+  - name: Animals
+    description: Information about Animals.
+  - name: Pets
+    description: Information about Pets.
diff -pruN 0.26.4-3/tests/data/openapi/all_of_with_relative_ref/schema/animal.yaml 0.45.0-1/tests/data/openapi/all_of_with_relative_ref/schema/animal.yaml
--- 0.26.4-3/tests/data/openapi/all_of_with_relative_ref/schema/animal.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/all_of_with_relative_ref/schema/animal.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+title: Animal
+type: object
+additionalProperties: false
+properties:
+  kind:
+    type: string
+    description: The kind of the animal
+    enum:
+      - CAT
+      - DOG
diff -pruN 0.26.4-3/tests/data/openapi/all_of_with_relative_ref/schema/pet.yaml 0.45.0-1/tests/data/openapi/all_of_with_relative_ref/schema/pet.yaml
--- 0.26.4-3/tests/data/openapi/all_of_with_relative_ref/schema/pet.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/all_of_with_relative_ref/schema/pet.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,9 @@
+title: Pet
+type: object
+additionalProperties: false
+properties:
+  kind:
+    description: The kind of the pet
+    type: string
+    allOf:
+      - $ref: ./animal.yaml#/properties/kind
diff -pruN 0.26.4-3/tests/data/openapi/allof.yaml 0.45.0-1/tests/data/openapi/allof.yaml
--- 0.26.4-3/tests/data/openapi/allof.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/allof.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,225 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pet"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+    post:
+      summary: Create a pet
+      operationId: createPets
+      tags:
+        - pets
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+  /pets/{petId}:
+    get:
+      summary: Info for a specific pet
+      operationId: showPetById
+      tags:
+        - pets
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Expected response to a valid request
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pet"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+components:
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    Car:
+      required:
+        - number
+      properties:
+        number:
+          type: string
+    AllOfref:
+      allOf:
+        - $ref: "#/components/schemas/Pet"
+        - $ref: "#/components/schemas/Car"
+    AllOfNested1:
+      allOf:
+       - $ref: "#/components/schemas/AllOfNested2"
+       - type: object
+         properties:
+           name:
+             $ref: "#/components/schemas/AllOfCombine"
+    AllOfNested2:
+      allOf:
+       - $ref: "#/components/schemas/AllOfNested3"
+       - type: object
+         properties:
+           name:
+             $ref: "#/components/schemas/AllOfNested1"
+    AllOfNested3:
+      allOf:
+       - $ref: "#/components/schemas/AllOfCombine"
+       - type: object
+         properties:
+           name:
+             $ref: "#/components/schemas/AnyOfCombine"
+    AllOfobj:
+      allOf:
+        - type: object
+          properties:
+            name:
+              type: string
+        - type: object
+          properties:
+            number:
+              type: string
+    AllOfCombine:
+      allOf:
+        - $ref: "#/components/schemas/Pet"
+        - type: object
+          properties:
+            birthdate:
+              type: string
+              format: date
+            size:
+              type: integer
+              minimum: 1
+    AnyOfCombine:
+        allOf:
+          - $ref: "#/components/schemas/Pet"
+          - $ref: "#/components/schemas/Car"
+          - type: object
+            properties:
+              age:
+                type: string
+    AnyOfCombineInObject:
+      type: object
+      properties:
+        item:
+          allOf:
+            - $ref: "#/components/schemas/Pet"
+            - $ref: "#/components/schemas/Car"
+            - type: object
+              properties:
+                age:
+                  type: string
+    AnyOfCombineInArray:
+      type: array
+      items:
+        allOf:
+          - $ref: "#/components/schemas/Pet"
+          - $ref: "#/components/schemas/Car"
+          - type: object
+            properties:
+              age:
+                type: string
+    AnyOfCombineInRoot:
+      allOf:
+        - $ref: "#/components/schemas/Pet"
+        - $ref: "#/components/schemas/Car"
+        - type: object
+          properties:
+            age:
+              type: string
+            birthdate:
+              type: string
+              format: date-time
+    AnyOfCombineUnknownObjectInRoot:
+      type: array
+      items:
+        allOf:
+          - $ref: "#/components/schemas/Pet"
+          - description: 'TODO'
+    AnyOfCombineUnknownObjectInArray:
+      allOf:
+        - $ref: "#/components/schemas/Pet"
+        - description: 'TODO'
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/allof_enum_ref.yaml 0.45.0-1/tests/data/openapi/allof_enum_ref.yaml
--- 0.26.4-3/tests/data/openapi/allof_enum_ref.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/allof_enum_ref.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,23 @@
+openapi: 3.0.1
+info:
+  title: Test API
+  version: 1.0.0
+paths: {}
+components:
+  schemas:
+    CreateOrderByEstimateRequest:
+      type: object
+      properties:
+        quantity_trunc:
+          $ref: '#/components/schemas/QuantityTrunc'
+    QuantityTrunc:
+      type: string
+      description: Quantity truncation setting
+      allOf:
+        - $ref: '#/components/schemas/MassUnit'
+    MassUnit:
+      type: string
+      enum:
+        - g
+        - kg
+        - t
diff -pruN 0.26.4-3/tests/data/openapi/allof_materialize_defaults.yaml 0.45.0-1/tests/data/openapi/allof_materialize_defaults.yaml
--- 0.26.4-3/tests/data/openapi/allof_materialize_defaults.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/allof_materialize_defaults.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,26 @@
+openapi: "3.0.0"
+info:
+  title: Test materialize allOf defaults
+  version: "1.0.0"
+components:
+  schemas:
+    Parent:
+      type: object
+      properties:
+        name:
+          type: string
+          default: "parent_default"
+          minLength: 1
+        count:
+          type: integer
+          default: 10
+          minimum: 0
+    Child:
+      allOf:
+        - $ref: "#/components/schemas/Parent"
+        - type: object
+          properties:
+            name:
+              maxLength: 100
+            count:
+              maximum: 1000
diff -pruN 0.26.4-3/tests/data/openapi/allof_merge_mode_none.yaml 0.45.0-1/tests/data/openapi/allof_merge_mode_none.yaml
--- 0.26.4-3/tests/data/openapi/allof_merge_mode_none.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/allof_merge_mode_none.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+openapi: "3.0.0"
+info:
+  title: Test allof-merge-mode none
+  version: "1.0.0"
+components:
+  schemas:
+    Parent:
+      type: object
+      properties:
+        name:
+          type: string
+          minLength: 1
+          default: "parent_default"
+    Child:
+      allOf:
+        - $ref: "#/components/schemas/Parent"
+        - type: object
+          properties:
+            name:
+              maxLength: 100
diff -pruN 0.26.4-3/tests/data/openapi/allof_multiple_parents_same_property.yaml 0.45.0-1/tests/data/openapi/allof_multiple_parents_same_property.yaml
--- 0.26.4-3/tests/data/openapi/allof_multiple_parents_same_property.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/allof_multiple_parents_same_property.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,31 @@
+openapi: "3.0.0"
+info:
+  title: Test allOf with multiple parents having same property
+  version: "1.0.0"
+components:
+  schemas:
+    Parent1:
+      type: object
+      properties:
+        name:
+          type: string
+          minLength: 1
+    Parent2:
+      type: object
+      properties:
+        name:
+          type: string
+          minLength: 5
+        age:
+          type: integer
+          minimum: 0
+    Child:
+      allOf:
+        - $ref: "#/components/schemas/Parent1"
+        - $ref: "#/components/schemas/Parent2"
+        - type: object
+          properties:
+            name:
+              maxLength: 100
+            age:
+              maximum: 150
diff -pruN 0.26.4-3/tests/data/openapi/allof_parent_bool_property.yaml 0.45.0-1/tests/data/openapi/allof_parent_bool_property.yaml
--- 0.26.4-3/tests/data/openapi/allof_parent_bool_property.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/allof_parent_bool_property.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+openapi: "3.1.0"
+info:
+  title: Test allOf with parent having bool property schema
+  version: "1.0.0"
+components:
+  schemas:
+    Parent:
+      type: object
+      properties:
+        name:
+          type: string
+          minLength: 1
+        allowed: true
+    Child:
+      allOf:
+        - $ref: "#/components/schemas/Parent"
+        - type: object
+          properties:
+            name:
+              maxLength: 100
diff -pruN 0.26.4-3/tests/data/openapi/allof_parent_no_properties.yaml 0.45.0-1/tests/data/openapi/allof_parent_no_properties.yaml
--- 0.26.4-3/tests/data/openapi/allof_parent_no_properties.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/allof_parent_no_properties.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+openapi: "3.0.0"
+info:
+  title: Test allOf with parent having no properties
+  version: "1.0.0"
+components:
+  schemas:
+    EmptyParent:
+      type: object
+      description: A parent with no properties
+    Child:
+      allOf:
+        - $ref: "#/components/schemas/EmptyParent"
+        - type: object
+          properties:
+            name:
+              type: string
+              maxLength: 100
diff -pruN 0.26.4-3/tests/data/openapi/allof_partial_override_array_items.yaml 0.45.0-1/tests/data/openapi/allof_partial_override_array_items.yaml
--- 0.26.4-3/tests/data/openapi/allof_partial_override_array_items.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/allof_partial_override_array_items.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,27 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    Thing:
+      type: object
+      properties:
+        type:
+          type: string
+          default: playground:Thing
+        type_list:
+          type: array
+          default:
+            - playground:Thing
+          items:
+            type: string
+    Person:
+      allOf:
+        - $ref: "#/components/schemas/Thing"
+        - type: object
+          properties:
+            type:
+              default: playground:Person
+            type_list:
+              default:
+                - playground:Person
+              items:
+                title: A type entry
diff -pruN 0.26.4-3/tests/data/openapi/allof_partial_override_array_items_no_parent.yaml 0.45.0-1/tests/data/openapi/allof_partial_override_array_items_no_parent.yaml
--- 0.26.4-3/tests/data/openapi/allof_partial_override_array_items_no_parent.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/allof_partial_override_array_items_no_parent.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    Thing:
+      type: object
+      properties:
+        name:
+          type: string
+    Person:
+      allOf:
+        - $ref: "#/components/schemas/Thing"
+        - type: object
+          properties:
+            tags:
+              type: array
+              default:
+                - tag1
+              items:
+                title: A tag entry
diff -pruN 0.26.4-3/tests/data/openapi/allof_partial_override_deeply_nested_array.yaml 0.45.0-1/tests/data/openapi/allof_partial_override_deeply_nested_array.yaml
--- 0.26.4-3/tests/data/openapi/allof_partial_override_deeply_nested_array.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/allof_partial_override_deeply_nested_array.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,30 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    Thing:
+      type: object
+      properties:
+        cube:
+          type: array
+          default:
+            - - - a
+          items:
+            type: array
+            items:
+              type: array
+              items:
+                type: string
+    Person:
+      allOf:
+        - $ref: "#/components/schemas/Thing"
+        - type: object
+          properties:
+            cube:
+              default:
+                - - - b
+              items:
+                title: A plane
+                items:
+                  title: A row
+                  items:
+                    title: A cell
diff -pruN 0.26.4-3/tests/data/openapi/allof_partial_override_inherited_types.yaml 0.45.0-1/tests/data/openapi/allof_partial_override_inherited_types.yaml
--- 0.26.4-3/tests/data/openapi/allof_partial_override_inherited_types.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/allof_partial_override_inherited_types.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    Thing:
+      type: object
+      properties:
+        type:
+          type: string
+        name:
+          type: string
+          description: First and Last name
+    Person:
+      allOf:
+        - $ref: "#/components/schemas/Thing"
+        - type: object
+          properties:
+            type:
+              default: playground:Person
+            name:
+              description: First and Last name
+            age:
+              type: integer
+          required:
+            - name
diff -pruN 0.26.4-3/tests/data/openapi/allof_partial_override_nested_array_items.yaml 0.45.0-1/tests/data/openapi/allof_partial_override_nested_array_items.yaml
--- 0.26.4-3/tests/data/openapi/allof_partial_override_nested_array_items.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/allof_partial_override_nested_array_items.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,28 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    Thing:
+      type: object
+      properties:
+        matrix:
+          type: array
+          default:
+            - - a
+              - b
+          items:
+            type: array
+            items:
+              type: string
+    Person:
+      allOf:
+        - $ref: "#/components/schemas/Thing"
+        - type: object
+          properties:
+            matrix:
+              default:
+                - - c
+                  - d
+              items:
+                title: A row
+                items:
+                  title: A cell
diff -pruN 0.26.4-3/tests/data/openapi/allof_partial_override_non_array_field.yaml 0.45.0-1/tests/data/openapi/allof_partial_override_non_array_field.yaml
--- 0.26.4-3/tests/data/openapi/allof_partial_override_non_array_field.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/allof_partial_override_non_array_field.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    Thing:
+      type: object
+      properties:
+        name:
+          type: string
+          default: default_name
+        count:
+          type: integer
+          default: 0
+    Person:
+      allOf:
+        - $ref: "#/components/schemas/Thing"
+        - type: object
+          properties:
+            name:
+              title: Person name
+            count:
+              description: Count value
diff -pruN 0.26.4-3/tests/data/openapi/allof_partial_override_simple_list_any.yaml 0.45.0-1/tests/data/openapi/allof_partial_override_simple_list_any.yaml
--- 0.26.4-3/tests/data/openapi/allof_partial_override_simple_list_any.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/allof_partial_override_simple_list_any.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+openapi: "3.0.0"
+info:
+  title: Test Simple List[Any]
+  version: "1.0.0"
+components:
+  schemas:
+    Parent:
+      type: object
+      properties:
+        items:
+          type: array
+          items:
+            type: string
+    Child:
+      allOf:
+        - $ref: "#/components/schemas/Parent"
+        - type: object
+          properties:
+            items:
+              items:
+                title: An item
diff -pruN 0.26.4-3/tests/data/openapi/allof_partial_override_unique_items.yaml 0.45.0-1/tests/data/openapi/allof_partial_override_unique_items.yaml
--- 0.26.4-3/tests/data/openapi/allof_partial_override_unique_items.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/allof_partial_override_unique_items.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+openapi: "3.0.0"
+info:
+  title: Test uniqueItems inheritance in allOf
+  version: "1.0.0"
+components:
+  schemas:
+    Thing:
+      type: object
+      properties:
+        tags:
+          type: array
+          uniqueItems: true
+          items:
+            type: string
+    Person:
+      allOf:
+        - $ref: "#/components/schemas/Thing"
+        - type: object
+          properties:
+            tags:
+              default:
+                - tag2
+              items:
+                title: A tag entry
diff -pruN 0.26.4-3/tests/data/openapi/allof_property_bool_schema.yaml 0.45.0-1/tests/data/openapi/allof_property_bool_schema.yaml
--- 0.26.4-3/tests/data/openapi/allof_property_bool_schema.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/allof_property_bool_schema.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+openapi: "3.1.0"
+info:
+  title: Test allOf with bool property schema
+  version: "1.0.0"
+components:
+  schemas:
+    Parent:
+      type: object
+      properties:
+        name:
+          type: string
+          minLength: 1
+    Child:
+      allOf:
+        - $ref: "#/components/schemas/Parent"
+        - type: object
+          properties:
+            name:
+              maxLength: 100
+            allowed: true
diff -pruN 0.26.4-3/tests/data/openapi/allof_required.yaml 0.45.0-1/tests/data/openapi/allof_required.yaml
--- 0.26.4-3/tests/data/openapi/allof_required.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/allof_required.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,30 @@
+openapi: 3.0.0
+info:
+  title: "no title"
+  version: "no version"
+paths: {}
+components:
+  schemas:
+    Foo:
+      type: object
+      required:
+        - a
+        - b
+      properties:
+        a:
+          type: string
+        b:
+          type: string
+
+    Bar:
+      type: object
+      properties:
+        type:
+          type: string
+          pattern: service
+        name:
+          type: string
+      allOf:
+        - $ref: '#/components/schemas/Foo'
+        - required:
+            - type
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/allof_required_fields.yaml 0.45.0-1/tests/data/openapi/allof_required_fields.yaml
--- 0.26.4-3/tests/data/openapi/allof_required_fields.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/allof_required_fields.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    EmailMessage:
+      title: Email message
+      description: |
+        An email message. There must be at least one recipient in `to`, `cc`, or `bcc`.
+      type: object
+      required:
+        - allOf:
+            - message
+            - subject
+            - to
+      properties:
+        message:
+          type: string
+          description: The email message text.
+        subject:
+          type: string
+          description: The subject line of the email.
+        to:
+          type: array
+          description: A list of email addresses.
+          items:
+            type: string
diff -pruN 0.26.4-3/tests/data/openapi/allof_same_prefix_with_ref.yaml 0.45.0-1/tests/data/openapi/allof_same_prefix_with_ref.yaml
--- 0.26.4-3/tests/data/openapi/allof_same_prefix_with_ref.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/allof_same_prefix_with_ref.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,30 @@
+openapi: 3.0.3
+info:
+  title: Foo
+  version: "1.0"
+paths:
+  /:
+    get:
+      responses:
+        '200':
+          description: ''
+components:
+  schemas:
+    Foo:
+      type: object
+      properties:
+        foo_bar:
+          allOf:
+          - $ref: '#/components/schemas/FooBarBaz'
+
+    FooBar:
+      type: object
+      properties:
+        id:
+          type: integer
+
+    FooBarBaz:
+      type: object
+      properties:
+        id:
+          type: integer
diff -pruN 0.26.4-3/tests/data/openapi/allof_with_anyof_ref.yaml 0.45.0-1/tests/data/openapi/allof_with_anyof_ref.yaml
--- 0.26.4-3/tests/data/openapi/allof_with_anyof_ref.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/allof_with_anyof_ref.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,59 @@
+# Test case for allOf referencing a schema with anyOf
+# This tests the anyOf branch in _parse_all_of_item
+
+openapi: 3.0.0
+info:
+  title: Test API
+  version: 1.0.0
+paths:
+  /items:
+    post:
+      summary: Create an item
+      requestBody:
+        required: true
+        content:
+          application/json:
+            schema:
+              $ref: '#/components/schemas/ItemPostRequest'
+      responses:
+        '200':
+          description: Success
+components:
+  schemas:
+    TextItem:
+      type: object
+      required:
+        - itemType
+        - text
+      properties:
+        itemType:
+          type: string
+          enum:
+            - text
+        text:
+          type: string
+    NumberItem:
+      type: object
+      required:
+        - itemType
+        - value
+      properties:
+        itemType:
+          type: string
+          enum:
+            - number
+        value:
+          type: integer
+    Item:
+      anyOf:
+        - $ref: '#/components/schemas/TextItem'
+        - $ref: '#/components/schemas/NumberItem'
+    ItemPostRequest:
+      allOf:
+        - $ref: '#/components/schemas/Item'
+        - type: object
+          required:
+            - itemId
+          properties:
+            itemId:
+              type: string
diff -pruN 0.26.4-3/tests/data/openapi/allof_with_description_only.yaml 0.45.0-1/tests/data/openapi/allof_with_description_only.yaml
--- 0.26.4-3/tests/data/openapi/allof_with_description_only.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/allof_with_description_only.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+openapi: "3.0.0"
+info:
+  title: Test API
+  version: "1.0.0"
+paths: {}
+components:
+  schemas:
+    MyModel:
+      type: object
+      description: A model that has a description.
+      properties:
+        name:
+          type: string
+    MyOtherModel:
+      allOf:
+        - $ref: "#/components/schemas/MyModel"
+      description: Another model that should also have a description.
diff -pruN 0.26.4-3/tests/data/openapi/allof_with_oneof_ref.yaml 0.45.0-1/tests/data/openapi/allof_with_oneof_ref.yaml
--- 0.26.4-3/tests/data/openapi/allof_with_oneof_ref.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/allof_with_oneof_ref.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,64 @@
+# Test case for issue #1763
+# allOf referencing a schema with oneOf + discriminator
+
+openapi: 3.0.0
+info:
+  title: Test API
+  version: 1.0.0
+paths:
+  /users:
+    post:
+      summary: Create a user
+      requestBody:
+        required: true
+        content:
+          application/json:
+            schema:
+              $ref: '#/components/schemas/UserPostRequest'
+      responses:
+        '200':
+          description: Success
+components:
+  schemas:
+    AdminUser:
+      type: object
+      required:
+        - userType
+        - adminLevel
+      properties:
+        userType:
+          type: string
+          enum:
+            - admin
+        adminLevel:
+          type: integer
+    RegularUser:
+      type: object
+      required:
+        - userType
+        - username
+      properties:
+        userType:
+          type: string
+          enum:
+            - regular
+        username:
+          type: string
+    User:
+      oneOf:
+        - $ref: '#/components/schemas/AdminUser'
+        - $ref: '#/components/schemas/RegularUser'
+      discriminator:
+        propertyName: userType
+        mapping:
+          admin: '#/components/schemas/AdminUser'
+          regular: '#/components/schemas/RegularUser'
+    UserPostRequest:
+      allOf:
+        - $ref: '#/components/schemas/User'
+        - type: object
+          required:
+            - userId
+          properties:
+            userId:
+              type: string
diff -pruN 0.26.4-3/tests/data/openapi/allof_with_required_inherited_complex_allof.yaml 0.45.0-1/tests/data/openapi/allof_with_required_inherited_complex_allof.yaml
--- 0.26.4-3/tests/data/openapi/allof_with_required_inherited_complex_allof.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/allof_with_required_inherited_complex_allof.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,69 @@
+openapi: "3.0.0"
+info:
+  title: Test API - Complex allOf Inheritance
+  version: 1.0.0
+paths: {}
+components:
+  schemas:
+    StringConstraint:
+      type: string
+      minLength: 1
+
+    NumberConstraint:
+      type: number
+      minimum: 0
+
+    BaseConfig:
+      type: object
+      properties:
+        name:
+          type: string
+        enabled:
+          type: boolean
+
+    ExtendedConfig:
+      type: object
+      properties:
+        timeout:
+          type: integer
+
+    ProjectedItem:
+      type: object
+      properties:
+        id:
+          type: integer
+        code:
+          allOf:
+            - $ref: '#/components/schemas/StringConstraint'
+            - maxLength: 10
+        score:
+          allOf:
+            - type: number
+            - minimum: 0
+            - maximum: 100
+        config:
+          allOf:
+            - $ref: '#/components/schemas/BaseConfig'
+            - $ref: '#/components/schemas/ExtendedConfig'
+        metadata:
+          allOf:
+            - type: object
+              additionalProperties:
+                type: string
+            - type: object
+              additionalProperties:
+                minLength: 1
+
+    Item:
+      allOf:
+        - $ref: '#/components/schemas/ProjectedItem'
+        - type: object
+          required:
+            - id
+            - code
+            - score
+            - config
+            - metadata
+          properties:
+            extra:
+              type: string
diff -pruN 0.26.4-3/tests/data/openapi/allof_with_required_inherited_comprehensive.yaml 0.45.0-1/tests/data/openapi/allof_with_required_inherited_comprehensive.yaml
--- 0.26.4-3/tests/data/openapi/allof_with_required_inherited_comprehensive.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/allof_with_required_inherited_comprehensive.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,126 @@
+openapi: "3.0.0"
+info:
+  title: Test API - Comprehensive allOf Inheritance Coverage
+  version: 1.0.0
+paths: {}
+components:
+  schemas:
+    Status:
+      type: string
+      enum:
+        - active
+        - inactive
+
+    BaseType:
+      type: object
+      properties:
+        id:
+          type: integer
+
+    ProjectedEntity:
+      type: object
+      properties:
+        primitive_string:
+          type: string
+        primitive_int:
+          type: integer
+        primitive_number:
+          type: number
+        primitive_bool:
+          type: boolean
+        ref_field:
+          $ref: '#/components/schemas/BaseType'
+        enum_field:
+          $ref: '#/components/schemas/Status'
+        array_with_ref:
+          type: array
+          items:
+            $ref: '#/components/schemas/BaseType'
+        array_with_primitive:
+          type: array
+          items:
+            type: string
+        object_with_props:
+          type: object
+          properties:
+            nested:
+              type: string
+        object_with_additional:
+          type: object
+          additionalProperties:
+            type: integer
+        anyof_field:
+          anyOf:
+            - type: string
+            - type: integer
+        oneof_field:
+          oneOf:
+            - type: boolean
+            - type: number
+        allof_single_ref:
+          allOf:
+            - $ref: '#/components/schemas/BaseType'
+        allof_multiple_refs:
+          allOf:
+            - $ref: '#/components/schemas/BaseType'
+            - $ref: '#/components/schemas/BaseType'
+        allof_primitives_with_constraints:
+          allOf:
+            - type: string
+              minLength: 1
+            - minLength: 5
+              maxLength: 100
+        allof_with_pattern:
+          allOf:
+            - type: string
+              pattern: "^[a-z]+"
+            - pattern: "[0-9]$"
+        allof_with_unique:
+          allOf:
+            - type: array
+              items:
+                type: string
+            - uniqueItems: true
+        type_list:
+          type:
+            - string
+            - "null"
+        deep_nested:
+          type: object
+          properties:
+            level1:
+              type: object
+              properties:
+                level2:
+                  type: object
+                  properties:
+                    level3:
+                      type: string
+
+    Entity:
+      allOf:
+        - $ref: '#/components/schemas/ProjectedEntity'
+        - type: object
+          required:
+            - primitive_string
+            - primitive_int
+            - primitive_number
+            - primitive_bool
+            - ref_field
+            - enum_field
+            - array_with_ref
+            - array_with_primitive
+            - object_with_props
+            - object_with_additional
+            - anyof_field
+            - oneof_field
+            - allof_single_ref
+            - allof_multiple_refs
+            - allof_primitives_with_constraints
+            - allof_with_pattern
+            - allof_with_unique
+            - type_list
+            - deep_nested
+          properties:
+            extra:
+              type: string
diff -pruN 0.26.4-3/tests/data/openapi/allof_with_required_inherited_coverage.yaml 0.45.0-1/tests/data/openapi/allof_with_required_inherited_coverage.yaml
--- 0.26.4-3/tests/data/openapi/allof_with_required_inherited_coverage.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/allof_with_required_inherited_coverage.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,224 @@
+openapi: "3.0.0"
+info:
+  title: Test API - Coverage for allOf Inheritance Edge Cases
+  version: 1.0.0
+paths: {}
+components:
+  schemas:
+    # For testing _merge_primitive_schemas with constraint-only items (no type field)
+    ConstraintOnlyBase:
+      type: object
+      properties:
+        field_with_constraints:
+          minLength: 5
+          maxLength: 50
+
+    # For testing default case in _intersect_constraint (multipleOf)
+    MultipleOfBase:
+      type: object
+      properties:
+        multiple_field:
+          allOf:
+            - type: integer
+              multipleOf: 5
+            - multipleOf: 10
+
+    # For testing warning when $ref combined with primitives
+    RefWithPrimitiveBase:
+      type: object
+      properties:
+        ref_and_primitive:
+          allOf:
+            - $ref: '#/components/schemas/SimpleString'
+            - type: string
+              minLength: 1
+
+    SimpleString:
+      type: string
+
+    # For testing allOf with nested anyOf that has refs
+    NestedAnyOfWithRef:
+      type: object
+      properties:
+        nested_anyof:
+          anyOf:
+            - $ref: '#/components/schemas/SimpleString'
+            - type: integer
+
+    # For testing array items falling back to Any
+    DeepNestedArray:
+      type: object
+      properties:
+        deep_array:
+          type: array
+          items:
+            type: object
+            properties:
+              l1:
+                type: object
+                properties:
+                  l2:
+                    type: object
+                    properties:
+                      l3:
+                        type: object
+                        properties:
+                          l4:
+                            type: object
+                            properties:
+                              l5:
+                                type: string
+
+    # For testing oneOf path
+    OneOfBase:
+      type: object
+      properties:
+        oneof_field:
+          oneOf:
+            - type: string
+            - type: integer
+
+    # For testing oneOf with single item
+    SingleOneOf:
+      type: object
+      properties:
+        single_oneof:
+          oneOf:
+            - type: string
+
+    # For testing additionalProperties fallback to Any
+    AdditionalPropsDeep:
+      type: object
+      properties:
+        deep_dict:
+          type: object
+          additionalProperties:
+            type: object
+            properties:
+              l1:
+                type: object
+                properties:
+                  l2:
+                    type: object
+                    properties:
+                      l3:
+                        type: object
+                        properties:
+                          l4:
+                            type: string
+
+    # For testing object with allOf containing only objects without additionalProperties
+    ObjectOnlyAllOf:
+      type: object
+      properties:
+        object_allof:
+          allOf:
+            - type: object
+              properties:
+                a:
+                  type: string
+            - type: object
+              properties:
+                b:
+                  type: integer
+
+    # For testing anyOf with more than max_union_elements
+    LargeUnion:
+      type: object
+      properties:
+        large_union:
+          anyOf:
+            - type: string
+            - type: integer
+            - type: boolean
+            - type: number
+            - type: array
+              items:
+                type: string
+            - type: object
+
+    # For _get_inherited_field_type edge cases
+    BaseWithNoProperties:
+      type: object
+
+    BaseWithBooleanProperty:
+      type: object
+      properties:
+        bool_prop:
+          type: boolean
+
+    # Child that inherits from base without properties
+    ChildOfNoProps:
+      allOf:
+        - $ref: '#/components/schemas/BaseWithNoProperties'
+        - type: object
+          required:
+            - some_field
+          properties:
+            extra:
+              type: string
+
+    # For testing nested allOf with ref that resolves to a nested type with reference
+    NestedAllOfRef:
+      type: object
+      properties:
+        nested_allof_ref:
+          allOf:
+            - allOf:
+                - $ref: '#/components/schemas/SimpleString'
+
+    # For testing nested allOf that doesn't have ref but resolves to ref
+    # This should trigger the path where nested_type.reference is True but item.ref is None
+    NestedAllOfWithoutDirectRef:
+      type: object
+      properties:
+        nested_indirect:
+          allOf:
+            - oneOf:
+                - $ref: '#/components/schemas/SimpleString'
+            - type: string
+              minLength: 1
+
+    # For testing enum items in allOf
+    EnumInAllOf:
+      type: object
+      properties:
+        enum_field:
+          allOf:
+            - enum: ["a", "b", "c"]
+            - description: "enum with description"
+
+    # Main test schema that inherits all edge cases
+    EdgeCasesCoverage:
+      allOf:
+        - $ref: '#/components/schemas/ConstraintOnlyBase'
+        - $ref: '#/components/schemas/MultipleOfBase'
+        - $ref: '#/components/schemas/RefWithPrimitiveBase'
+        - $ref: '#/components/schemas/NestedAnyOfWithRef'
+        - $ref: '#/components/schemas/DeepNestedArray'
+        - $ref: '#/components/schemas/OneOfBase'
+        - $ref: '#/components/schemas/SingleOneOf'
+        - $ref: '#/components/schemas/AdditionalPropsDeep'
+        - $ref: '#/components/schemas/ObjectOnlyAllOf'
+        - $ref: '#/components/schemas/LargeUnion'
+        - $ref: '#/components/schemas/NestedAllOfRef'
+        - $ref: '#/components/schemas/NestedAllOfWithoutDirectRef'
+        - $ref: '#/components/schemas/EnumInAllOf'
+        - type: object
+          required:
+            - field_with_constraints
+            - multiple_field
+            - ref_and_primitive
+            - nested_anyof
+            - deep_array
+            - oneof_field
+            - single_oneof
+            - deep_dict
+            - object_allof
+            - large_union
+            - nested_allof_ref
+            - nested_indirect
+            - enum_field
+          properties:
+            local_field:
+              type: string
diff -pruN 0.26.4-3/tests/data/openapi/allof_with_required_inherited_edge_cases.yaml 0.45.0-1/tests/data/openapi/allof_with_required_inherited_edge_cases.yaml
--- 0.26.4-3/tests/data/openapi/allof_with_required_inherited_edge_cases.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/allof_with_required_inherited_edge_cases.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,116 @@
+openapi: "3.0.0"
+info:
+  title: Test API - Edge Cases for allOf Inheritance
+  version: 1.0.0
+paths: {}
+components:
+  schemas:
+    BaseRef:
+      type: object
+      properties:
+        id:
+          type: integer
+
+    StatusEnum:
+      type: string
+      enum:
+        - active
+        - inactive
+
+    ProjectedEdgeCases:
+      type: object
+      properties:
+        single_allof_primitive:
+          allOf:
+            - type: string
+        single_allof_ref:
+          allOf:
+            - $ref: '#/components/schemas/BaseRef'
+        allof_with_enum:
+          allOf:
+            - $ref: '#/components/schemas/StatusEnum'
+            - description: "enum with description"
+        allof_nested_anyof:
+          allOf:
+            - anyOf:
+                - type: string
+                - type: integer
+            - description: "nested anyOf"
+        allof_constraint_only:
+          allOf:
+            - minLength: 1
+            - maxLength: 100
+        allof_max_constraints:
+          allOf:
+            - type: integer
+              maximum: 100
+            - maximum: 50
+        allof_unique_items:
+          allOf:
+            - type: array
+              items:
+                type: string
+              uniqueItems: true
+            - uniqueItems: false
+        object_without_additional:
+          type: object
+          properties:
+            nested:
+              type: string
+        object_only_type:
+          type: object
+        multiple_additional_props:
+          allOf:
+            - type: object
+              additionalProperties:
+                type: string
+            - type: object
+              additionalProperties:
+                $ref: '#/components/schemas/BaseRef'
+        depth_limit_test:
+          type: object
+          properties:
+            l1:
+              type: object
+              properties:
+                l2:
+                  type: object
+                  properties:
+                    l3:
+                      type: object
+                      properties:
+                        l4:
+                          type: string
+        cycle_detection:
+          $ref: '#/components/schemas/BaseRef'
+        type_list_field:
+          type:
+            - string
+            - integer
+        allof_multiple_refs_only:
+          allOf:
+            - $ref: '#/components/schemas/BaseRef'
+            - $ref: '#/components/schemas/BaseRef'
+
+    EdgeCases:
+      allOf:
+        - $ref: '#/components/schemas/ProjectedEdgeCases'
+        - type: object
+          required:
+            - single_allof_primitive
+            - single_allof_ref
+            - allof_with_enum
+            - allof_nested_anyof
+            - allof_constraint_only
+            - allof_max_constraints
+            - allof_unique_items
+            - object_without_additional
+            - object_only_type
+            - multiple_additional_props
+            - depth_limit_test
+            - cycle_detection
+            - type_list_field
+            - allof_multiple_refs_only
+          properties:
+            extra:
+              type: string
diff -pruN 0.26.4-3/tests/data/openapi/allof_with_required_inherited_fields.yaml 0.45.0-1/tests/data/openapi/allof_with_required_inherited_fields.yaml
--- 0.26.4-3/tests/data/openapi/allof_with_required_inherited_fields.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/allof_with_required_inherited_fields.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,29 @@
+openapi: "3.0.0"
+info:
+  title: Test API
+  version: 1.0.0
+paths: {}
+components:
+  schemas:
+    ProjectedPet:
+      type: object
+      properties:
+        id:
+          type: integer
+        name:
+          type: string
+    Pet:
+      allOf:
+        - $ref: '#/components/schemas/ProjectedPet'
+        - type: object
+          required:
+            - id
+            - name
+            - opts
+          properties:
+            tag:
+              type: string
+            opts:
+              type: array
+              items:
+                type: string
diff -pruN 0.26.4-3/tests/data/openapi/allof_with_required_inherited_nested_object.yaml 0.45.0-1/tests/data/openapi/allof_with_required_inherited_nested_object.yaml
--- 0.26.4-3/tests/data/openapi/allof_with_required_inherited_nested_object.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/allof_with_required_inherited_nested_object.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,67 @@
+openapi: "3.0.0"
+info:
+  title: Test API - Nested Object Inheritance
+  version: 1.0.0
+paths: {}
+components:
+  schemas:
+    Address:
+      type: object
+      properties:
+        street:
+          type: string
+        city:
+          type: string
+
+    ContactInfo:
+      type: object
+      properties:
+        email:
+          type: string
+        phone:
+          type: string
+
+    ProjectedPerson:
+      type: object
+      properties:
+        id:
+          type: integer
+        name:
+          type: string
+        address:
+          type: object
+          properties:
+            street:
+              type: string
+            city:
+              type: string
+        metadata:
+          type: object
+          additionalProperties:
+            type: string
+        contact:
+          $ref: '#/components/schemas/ContactInfo'
+        tags:
+          type: array
+          items:
+            type: string
+        priority:
+          anyOf:
+            - type: integer
+            - type: string
+
+    Person:
+      allOf:
+        - $ref: '#/components/schemas/ProjectedPerson'
+        - type: object
+          required:
+            - id
+            - name
+            - address
+            - metadata
+            - contact
+            - tags
+            - priority
+          properties:
+            nickname:
+              type: string
diff -pruN 0.26.4-3/tests/data/openapi/any.yaml 0.45.0-1/tests/data/openapi/any.yaml
--- 0.26.4-3/tests/data/openapi/any.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/any.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,34 @@
+info:
+  title: FastAPI
+  version: 0.1.0
+openapi: 3.0.2
+paths:
+  /:
+    post:
+      operationId: read_root__post
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: '#/components/schemas/Item'
+        required: true
+      responses:
+        '200':
+          content:
+            application/json:
+              schema: {}
+          description: Successful Response
+      summary: Read Root
+components:
+  schemas:
+    Item:
+      properties:
+        bar:
+          title: Bar
+        foo:
+          title: Foo
+          type: string
+      required:
+      - foo
+      title: Item
+      type: object
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/anyof.yaml 0.45.0-1/tests/data/openapi/anyof.yaml
--- 0.26.4-3/tests/data/openapi/anyof.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/anyof.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,183 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pet"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+    post:
+      summary: Create a pet
+      operationId: createPets
+      tags:
+        - pets
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+  /pets/{petId}:
+    get:
+      summary: Info for a specific pet
+      operationId: showPetById
+      tags:
+        - pets
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Expected response to a valid request
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pet"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+components:
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    Car:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    AnyOfItem:
+      anyOf:
+        - $ref: "#/components/schemas/Pet"
+        - $ref: "#/components/schemas/Car"
+        - type: object
+          properties:
+              name:
+                type: string
+        - type: string
+          maxLength: 5000
+    AnyOfobj:
+      type: object
+      properties:
+        item:
+          anyOf:
+            - $ref: "#/components/schemas/Pet"
+            - $ref: "#/components/schemas/Car"
+            - type: object
+              properties:
+                name:
+                  type: string
+            - type: string
+              maxLength: 5000
+    AnyOfArray:
+      type: array
+      items:
+          anyOf:
+            - $ref: "#/components/schemas/Pet"
+            - $ref: "#/components/schemas/Car"
+            - type: object
+              properties:
+                name:
+                  type: string
+                birthday:
+                  type: string
+                  format: date
+            - type: string
+              maxLength: 5000
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    Config:
+      properties:
+        setting:
+          type: object
+          additionalProperties:
+            anyOf:
+            - type: string
+            - type: array
+              items:
+                type: string
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/anyof_required.yaml 0.45.0-1/tests/data/openapi/anyof_required.yaml
--- 0.26.4-3/tests/data/openapi/anyof_required.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/anyof_required.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,37 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    EmailMessage:
+      title: Email message
+      description: |
+        An email message. There must be at least one recipient in `to`, `cc`, or `bcc`.
+      type: object
+      required:
+        - message
+        - subject
+        - anyOf:
+            - to
+            - cc
+            - bcc
+      properties:
+        bcc:
+          type: array
+          items:
+            type: string
+          description: A list of "blind carbon copy" email addresses.
+        cc:
+          type: array
+          items:
+            type: string
+          description: A list of "carbon copy" email addresses.
+        message:
+          type: string
+          description: The email message text.
+        subject:
+          type: string
+          description: The subject line of the email.
+        to:
+          type: array
+          description: A list of email addresses.
+          items:
+            type: string
diff -pruN 0.26.4-3/tests/data/openapi/api.yaml 0.45.0-1/tests/data/openapi/api.yaml
--- 0.26.4-3/tests/data/openapi/api.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/api.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,179 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+    post:
+      summary: Create a pet
+      operationId: createPets
+      tags:
+        - pets
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+  /pets/{petId}:
+    get:
+      summary: Info for a specific pet
+      operationId: showPetById
+      tags:
+        - pets
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Expected response to a valid request
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+components:
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+          default: 1
+        name:
+          type: string
+        tag:
+          type: string
+    Pets:
+      type: array
+      items:
+        $ref: "#/components/schemas/Pet"
+    Users:
+      type: array
+      items:
+        required:
+          - id
+          - name
+        properties:
+          id:
+            type: integer
+            format: int64
+          name:
+            type: string
+          tag:
+            type: string
+    Id:
+      type: string
+    Rules:
+      type: array
+      items:
+        type: string
+    Error:
+      description: error result
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    apis:
+      type: array
+      items:
+        type: object
+        properties:
+          apiKey:
+            type: string
+            description: To be used as a dataset parameter value
+          apiVersionNumber:
+            type: string
+            description: To be used as a version parameter value
+          apiUrl:
+            type: string
+            format: uri
+            description: "The URL describing the dataset's fields"
+          apiDocumentationUrl:
+            type: string
+            format: uri
+            description: A URL to the API console for each API
+    Event:
+      type: object
+      description: Event object
+      properties:
+        name:
+          type: string
+    Result:
+        type: object
+        properties:
+          event:
+            $ref: '#/components/schemas/Event'
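Fixtures like `api.yaml` are the petstore-style inputs the test suite feeds through the generator. For orientation, the project's documented module usage looks roughly like the sketch below; the exact keyword set of `generate()` has shifted between versions, so treat the call as illustrative rather than exact.

```python
from pathlib import Path

from datamodel_code_generator import InputFileType, generate

# Generate pydantic models from the petstore-style fixture above.
generate(
    Path("tests/data/openapi/api.yaml"),
    input_file_type=InputFileType.OpenAPI,
    output=Path("model.py"),
)
```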
diff -pruN 0.26.4-3/tests/data/openapi/api_constrained.yaml 0.45.0-1/tests/data/openapi/api_constrained.yaml
--- 0.26.4-3/tests/data/openapi/api_constrained.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/api_constrained.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,229 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+            minimum: 0
+            maximum: 100
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+    post:
+      summary: Create a pet
+      operationId: createPets
+      tags:
+        - pets
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+  /pets/{petId}:
+    get:
+      summary: Info for a specific pet
+      operationId: showPetById
+      tags:
+        - pets
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Expected response to a valid request
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+components:
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+          minimum: 0
+          maximum: 9223372036854775807
+        name:
+          type: string
+          maxLength: 256
+        tag:
+          type: string
+          maxLength: 64
+    Pets:
+      type: array
+      items:
+        $ref: "#/components/schemas/Pet"
+      maxItems: 10
+      minItems: 1
+      uniqueItems: true
+    UID:
+      type: integer
+      minimum: 0
+    Users:
+      type: array
+      items:
+        required:
+          - id
+          - name
+          - uid
+        properties:
+          id:
+            type: integer
+            format: int64
+            minimum: 0
+          name:
+            type: string
+            maxLength: 256
+          tag:
+            type: string
+            maxLength: 64
+          uid:
+            $ref: '#/components/schemas/UID'
+          phones:
+            type: array
+            items:
+              type: string
+              minLength: 3
+            maxItems: 10
+          fax:
+            type: array
+            items:
+              type: string
+              minLength: 3
+          height:
+            type:
+              - integer
+              - number
+            minimum: 1
+            maximum: 300
+          weight:
+            type:
+              - number
+              - integer
+            minimum: 1.0
+            maximum: 1000.0
+          age:
+            type: integer
+            minimum: 0.0
+            maximum: 200.0
+            exclusiveMinimum: true
+          rating:
+            type: number
+            minimum: 0
+            exclusiveMinimum: true
+            maximum: 5
+
+    Id:
+      type: string
+    Rules:
+      type: array
+      items:
+        type: string
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    apis:
+      type: array
+      items:
+        type: object
+        properties:
+          apiKey:
+            type: string
+            description: To be used as a dataset parameter value
+          apiVersionNumber:
+            type: string
+            description: To be used as a version parameter value
+          apiUrl:
+            type: string
+            format: uri
+            minLength: 1
+            description: "The URL describing the dataset's fields"
+          apiDocumentationUrl:
+            type: string
+            format: uri
+            description: A URL to the API console for each API
+    Event:
+      type: object
+      properties:
+        name:
+          type: string
+    Result:
+        type: object
+        properties:
+          event:
+            $ref: '#/components/schemas/Event'
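This variant layers validation keywords (minimum/maximum, maxLength, min/maxItems, exclusive bounds) onto the same petstore shapes. In field-constraints style output these typically land as `Field(...)` arguments; a hand-written sketch of the mapping for `Pet` and the `rating` property follows (`UserSketch` is an invented name for illustration, and the exact emitted form depends on generator options such as `--field-constraints`).

```python
from typing import Optional

from pydantic import BaseModel, Field


class Pet(BaseModel):
    # minimum/maximum -> ge/le
    id: int = Field(..., ge=0, le=9223372036854775807)
    name: str = Field(..., max_length=256)
    tag: Optional[str] = Field(None, max_length=64)


class UserSketch(BaseModel):
    # minimum: 0 + exclusiveMinimum: true -> gt=0 (strict lower bound)
    rating: Optional[float] = Field(None, gt=0, le=5)
```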
diff -pruN 0.26.4-3/tests/data/openapi/api_multiline_docstrings.yaml 0.45.0-1/tests/data/openapi/api_multiline_docstrings.yaml
--- 0.26.4-3/tests/data/openapi/api_multiline_docstrings.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/api_multiline_docstrings.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,179 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+    post:
+      summary: Create a pet
+      operationId: createPets
+      tags:
+        - pets
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+  /pets/{petId}:
+    get:
+      summary: Info for a specific pet
+      operationId: showPetById
+      tags:
+        - pets
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Expected response to a valid request
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+components:
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+          default: 1
+        name:
+          type: string
+        tag:
+          type: string
+    Pets:
+      type: array
+      items:
+        $ref: "#/components/schemas/Pet"
+    Users:
+      type: array
+      items:
+        required:
+          - id
+          - name
+        properties:
+          id:
+            type: integer
+            format: int64
+          name:
+            type: string
+          tag:
+            type: string
+    Id:
+      type: string
+    Rules:
+      type: array
+      items:
+        type: string
+    Error:
+      description: "error result.\nNow with multi-line docstrings."
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    apis:
+      type: array
+      items:
+        type: object
+        properties:
+          apiKey:
+            type: string
+            description: "To be used as a dataset parameter value.\nNow also with multi-line docstrings."
+          apiVersionNumber:
+            type: string
+            description: To be used as a version parameter value
+          apiUrl:
+            type: string
+            format: uri
+            description: "The URL describing the dataset's fields"
+          apiDocumentationUrl:
+            type: string
+            format: uri
+            description: A URL to the API console for each API
+    Event:
+      type: object
+      description: Event object
+      properties:
+        name:
+          type: string
+    Result:
+        type: object
+        properties:
+          event:
+            $ref: '#/components/schemas/Event'
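The only difference from `api.yaml` is the multi-line `description` strings; with a docstring-emitting option (e.g. `--use-schema-description`, if memory serves) the generator can surface them as class docstrings. A sketch of the expected shape for `Error`:

```python
from pydantic import BaseModel


class Error(BaseModel):
    """
    error result.
    Now with multi-line docstrings.
    """

    code: int
    message: str
```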
diff -pruN 0.26.4-3/tests/data/openapi/api_ordered_required_fields.yaml 0.45.0-1/tests/data/openapi/api_ordered_required_fields.yaml
--- 0.26.4-3/tests/data/openapi/api_ordered_required_fields.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/api_ordered_required_fields.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,182 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+    post:
+      summary: Create a pet
+      operationId: createPets
+      tags:
+        - pets
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+  /pets/{petId}:
+    get:
+      summary: Info for a specific pet
+      operationId: showPetById
+      tags:
+        - pets
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Expected response to a valid request
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+components:
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+        - beforeTag
+      properties:
+        id:
+          type: integer
+          format: int64
+          default: 1
+        name:
+          type: string
+        beforeTag:
+          type: string
+        tag:
+          type: string
+    Pets:
+      type: array
+      items:
+        $ref: "#/components/schemas/Pet"
+    Users:
+      type: array
+      items:
+        required:
+          - id
+          - name
+        properties:
+          id:
+            type: integer
+            format: int64
+          name:
+            type: string
+          tag:
+            type: string
+    Id:
+      type: string
+    Rules:
+      type: array
+      items:
+        type: string
+    Error:
+      description: error result
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    apis:
+      type: array
+      items:
+        type: object
+        properties:
+          apiKey:
+            type: string
+            description: To be used as a dataset parameter value
+          apiVersionNumber:
+            type: string
+            description: To be used as a version parameter value
+          apiUrl:
+            type: string
+            format: uri
+            description: "The URL describing the dataset's fields"
+          apiDocumentationUrl:
+            type: string
+            format: uri
+            description: A URL to the API console for each API
+    Event:
+      type: object
+      description: Event object
+      properties:
+        name:
+          type: string
+    Result:
+        type: object
+        properties:
+          event:
+            $ref: '#/components/schemas/Event'
diff -pruN 0.26.4-3/tests/data/openapi/array_called_fields_with_oneOf_items.yaml 0.45.0-1/tests/data/openapi/array_called_fields_with_oneOf_items.yaml
--- 0.26.4-3/tests/data/openapi/array_called_fields_with_oneOf_items.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/array_called_fields_with_oneOf_items.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+# input.yaml
+components:
+  schemas:
+    BadSchema:
+      type: object
+      properties:
+        fields:
+          type: array
+          items:
+            oneOf:
+            - type: object
+              properties:
+                a:
+                  type: string
+            - type: object
+              properties:
+                b:
+                  type: string
+                  pattern: "^[a-zA-Z_]+$"
\ No newline at end of file
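An array property whose `items` is a `oneOf` generally becomes a list over a `Union` of synthesized item models. A sketch under that assumption; `FieldA`/`FieldB` are invented names here (the generator derives its own, typically from the property path):

```python
from typing import List, Optional, Union

from pydantic import BaseModel, Field


class FieldA(BaseModel):
    a: Optional[str] = None


class FieldB(BaseModel):
    # pydantic v2 spelling; v1 used regex= instead of pattern=
    b: Optional[str] = Field(None, pattern=r"^[a-zA-Z_]+$")


class BadSchema(BaseModel):
    fields: Optional[List[Union[FieldA, FieldB]]] = None
```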
diff -pruN 0.26.4-3/tests/data/openapi/array_enum.yaml 0.45.0-1/tests/data/openapi/array_enum.yaml
--- 0.26.4-3/tests/data/openapi/array_enum.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/array_enum.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,18 @@
+openapi: 3.0.0
+info:
+  title: datamodel-code-generator bug example
+components:
+  schemas:
+    Type1:
+      type: array
+      items:
+        type: string
+        enum:
+          - enumOne
+          - enumTwo
+
+    Type2:
+      type: string
+      enum:
+        - enumFour
+        - enumFive
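String enums like these usually come out as `enum.Enum` subclasses, with the array schema reduced to a type alias over its item enum. A sketch; the class and member names are assumptions:

```python
from enum import Enum
from typing import List


class Type1Enum(Enum):
    enumOne = "enumOne"
    enumTwo = "enumTwo"


class Type2(Enum):
    enumFour = "enumFour"
    enumFive = "enumFive"


# The array schema itself collapses to an alias over its item enum.
Type1 = List[Type1Enum]
```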
diff -pruN 0.26.4-3/tests/data/openapi/body_and_parameters.yaml 0.45.0-1/tests/data/openapi/body_and_parameters.yaml
--- 0.26.4-3/tests/data/openapi/body_and_parameters.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/body_and_parameters.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,364 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+  description: |
+    This description is for testing
+    multi-line
+    description
+
+servers:
+  - url: http://petstore.swagger.io/v1
+security:
+  - BearerAuth: []
+paths:
+  /pets:
+    $ref: '#/components/pathItems/Pets'
+  /pets/{petId}:
+    get:
+      summary: Info for a specific pet
+      operationId: showPetById
+      tags:
+        - pets
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Expected response to a valid request
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pet"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    put:
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      summary: Update a pet
+      tags:
+        - pets
+      requestBody:
+        required: false
+        content:
+          application/json:
+            schema:
+              $ref: '#/components/schemas/PetForm'
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+  /food:
+    post:
+      summary: Create a food
+      tags:
+        - pets
+      requestBody:
+        required: true
+        content:
+          application/problem+json:
+            schema:
+              type: string
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/problem+json:
+              schema:
+                type: string
+  /food/{food_id}:
+    get:
+      summary: Info for a specific pet
+      operationId: showFoodById
+      tags:
+        - foods
+      parameters:
+        - name: food_id
+          in: path
+          description: The id of the food to retrieve
+          schema:
+            type: string
+        - name: message_texts
+          in: query
+          required: false
+          explode: true
+          schema:
+            type: array
+            items:
+              type: string
+      responses:
+        '200':
+          description: OK
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  type: integer
+              examples:
+                example-1:
+                  value:
+                    - 0
+                    - 1
+                    - 3
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+  /foo:
+    get:
+      tags:
+        - foo
+      responses:
+        '200':
+          description: OK
+          content:
+            application/json:
+              schema:
+                type: string
+    parameters:
+    - $ref: '#/components/parameters/MyParam'
+  /bar:
+    post:
+      summary: Create a bar
+      tags:
+        - bar
+      requestBody:
+        content:
+          application/x-www-form-urlencoded:
+            schema:
+              $ref: '#/components/schemas/PetForm'
+  /user:
+    get:
+      tags:
+        - user
+      responses:
+        '200':
+          description: OK
+          content:
+            application/json:
+              schema:
+                type: object
+                properties:
+                  timestamp:
+                    type: string
+                    format: date-time
+                  name:
+                    type: string
+                  age:
+                    type: string
+                required:
+                  - name
+                  - timestamp
+    post:
+      tags:
+        - user
+      requestBody:
+        required: true
+        content:
+          application/json:
+            schema:
+              type: object
+              properties:
+                timestamp:
+                  type: string
+                  format: date-time
+                name:
+                  type: string
+                age:
+                  type: string
+              required:
+                - name
+                - timestamp
+      responses:
+        '201':
+          description: OK
+  /users:
+    get:
+      tags:
+        - user
+      responses:
+        '200':
+          description: OK
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  type: object
+                  properties:
+                    timestamp:
+                      type: string
+                      format: date-time
+                    name:
+                      type: string
+                    age:
+                      type: string
+                  required:
+                    - name
+                    - timestamp
+    post:
+      tags:
+        - user
+      requestBody:
+        required: true
+        content:
+          application/json:
+            schema:
+              type: array
+              items:
+                type: object
+                properties:
+                  timestamp:
+                    type: string
+                    format: date-time
+                  name:
+                    type: string
+                  age:
+                    type: string
+                required:
+                  - name
+                  - timestamp
+      responses:
+        '201':
+          description: OK
+components:
+  parameters:
+    MyParam:
+      name: foo
+      in: query
+      schema:
+        type: string
+  securitySchemes:
+    BearerAuth:
+      type: http
+      scheme: bearer
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    PetForm:
+      title: PetForm
+      type: object
+      properties:
+        name:
+          type: string
+        age:
+          type: integer
+  pathItems:
+    Pets:
+      get:
+        summary: List all pets
+        operationId: listPets
+        tags:
+          - pets
+        security: []
+        parameters:
+          - name: limit
+            in: query
+            description: How many items to return at one time (max 100)
+            required: false
+            schema:
+              default: 0
+              type: integer
+              format: int32
+          - name: HomeAddress
+            in: query
+            required: false
+            schema:
+              default: 'Unknown'
+              type: string
+          - name: kind
+            in: query
+            required: false
+            schema:
+              default: dog
+              type: string
+        responses:
+          '200':
+            description: A paged array of pets
+            headers:
+              x-next:
+                description: A link to the next page of responses
+                schema:
+                  type: string
+            content:
+              application/json:
+                schema:
+                  type: array
+                  items:
+                  - $ref: "#/components/schemas/Pet"
+          default:
+            description: unexpected error
+            content:
+              application/json:
+                schema:
+                  $ref: "#/components/schemas/Error"
+      post:
+        summary: Create a pet
+        tags:
+          - pets
+        requestBody:
+          required: true
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/PetForm'
+        responses:
+          '201':
+            description: Null response
+          default:
+            description: unexpected error
+            content:
+              application/json:
+                schema:
+                  $ref: "#/components/schemas/Error"
diff -pruN 0.26.4-3/tests/data/openapi/body_and_parameters_remote_ref.yaml 0.45.0-1/tests/data/openapi/body_and_parameters_remote_ref.yaml
--- 0.26.4-3/tests/data/openapi/body_and_parameters_remote_ref.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/body_and_parameters_remote_ref.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,263 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+  description: |
+    This description is for testing
+    multi-line
+    description
+
+servers:
+  - url: http://petstore.swagger.io/v1
+security:
+  - BearerAuth: []
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      security: []
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            default: 0
+            type: integer
+            format: int32
+        - name: HomeAddress
+          in: query
+          required: false
+          schema:
+            default: 'Unknown'
+            type: string
+        - name: kind
+          in: query
+          required: false
+          schema:
+            default: dog
+            type: string
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                 - $ref: "#/components/schemas/Pet"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    post:
+      summary: Create a pet
+      tags:
+        - pets
+      requestBody:
+        $ref: 'https://schema.example#/components/requestBodies/Pet'
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+  /pets/{petId}:
+    get:
+      summary: Info for a specific pet
+      operationId: showPetById
+      tags:
+        - pets
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Expected response to a valid request
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pet"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    put:
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      summary: Update a pet
+      tags:
+        - pets
+      requestBody:
+        required: false
+        content:
+          application/json:
+            schema:
+              $ref: 'https://schema.example#/components/schemas/PetForm'
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+  /food:
+    post:
+      summary: Create a food
+      tags:
+        - pets
+      requestBody:
+        required: true
+        content:
+          application/problem+json:
+            schema:
+              type: string
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/problem+json:
+              schema:
+                type: string
+  /food/{food_id}:
+    get:
+      summary: Info for a specific pet
+      operationId: showFoodById
+      tags:
+        - foods
+      parameters:
+        - name: food_id
+          in: path
+          description: The id of the food to retrieve
+          schema:
+            type: string
+        - name: message_texts
+          in: query
+          required: false
+          explode: true
+          schema:
+            type: array
+            items:
+              type: string
+      responses:
+        '200':
+          description: OK
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  type: integer
+              examples:
+                example-1:
+                  value:
+                    - 0
+                    - 1
+                    - 3
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+  /foo:
+    get:
+      tags:
+        - foo
+      responses:
+        200:
+          $ref: 'https://schema.example#/components/responses/OK'
+    parameters:
+      - $ref: 'https://schema.example#/components/parameters/MyParam'
+components:
+  parameters:
+    MyParam:
+      name: foo
+      in: query
+      schema:
+        type: string
+  responses:
+    OK:
+      description: OK
+      content:
+        application/json:
+          schema:
+            type: string
+  requestBodies:
+    Pet:
+      required: true
+      content:
+        application/json:
+          schema:
+            $ref: '#/components/schemas/PetForm'
+  securitySchemes:
+    BearerAuth:
+      type: http
+      scheme: bearer
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    PetForm:
+      title: PetForm
+      type: object
+      properties:
+        name:
+          type: string
+        age:
+          type: integer
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/byte_format.yaml 0.45.0-1/tests/data/openapi/byte_format.yaml
--- 0.26.4-3/tests/data/openapi/byte_format.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/byte_format.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+openapi: '3.0.2'
+components:
+  schemas:
+    Data:
+      description: The version of this API
+      type: string
+      format: byte
+    Api:
+      type: object
+      required:
+        - data
+      properties:
+        data:
+          $ref: "#/components/schemas/Data"
\ No newline at end of file
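`type: string` with `format: byte` denotes base64-encoded content and is commonly mapped to a plain `bytes` field. A sketch of the expected `Api` model:

```python
from pydantic import BaseModel


class Api(BaseModel):
    # OpenAPI format: byte (base64-encoded string) -> bytes
    data: bytes
```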
diff -pruN 0.26.4-3/tests/data/openapi/circular_imports_acyclic.yaml 0.45.0-1/tests/data/openapi/circular_imports_acyclic.yaml
--- 0.26.4-3/tests/data/openapi/circular_imports_acyclic.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/circular_imports_acyclic.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,31 @@
+openapi: 3.0.0
+info:
+  title: Acyclic Module Dependencies Test
+  version: 1.0.0
+
+paths: {}
+
+components:
+  schemas:
+    # Models in root (will go to __init__.py)
+    RootModel:
+      type: object
+      properties:
+        id:
+          type: string
+
+    # Models with dot notation (will go to sub.py)
+    # sub.py only references RootModel, no back-reference
+    sub.ChildModel:
+      type: object
+      properties:
+        id:
+          type: string
+        parent:
+          $ref: '#/components/schemas/RootModel'
+
+    sub.AnotherChild:
+      type: object
+      properties:
+        sibling:
+          $ref: '#/components/schemas/sub.ChildModel'
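The dotted schema names steer the multi-file module split this family of fixtures tests: `RootModel` goes to the package root and the `sub.*` schemas to `sub.py`, per the fixture's own comments. Because `sub` only references the root, the import edge points one way and no cycle handling is needed. A single-file sketch of the classes, annotated with where each would land:

```python
from typing import Optional

from pydantic import BaseModel


# Would land in the package root (__init__.py).
class RootModel(BaseModel):
    id: Optional[str] = None


# Would land in sub.py; both classes only reference root-ward, so the
# generated sub module imports from the root but never the other way.
class ChildModel(BaseModel):
    id: Optional[str] = None
    parent: Optional[RootModel] = None


class AnotherChild(BaseModel):
    sibling: Optional[ChildModel] = None
```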
diff -pruN 0.26.4-3/tests/data/openapi/circular_imports_class_conflict.yaml 0.45.0-1/tests/data/openapi/circular_imports_class_conflict.yaml
--- 0.26.4-3/tests/data/openapi/circular_imports_class_conflict.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/circular_imports_class_conflict.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,68 @@
+openapi: 3.0.0
+info:
+  title: Circular Import with Class Name Conflicts Test
+  version: 1.0.0
+
+paths: {}
+
+components:
+  schemas:
+    # Root module has Item class
+    Item:
+      type: object
+      properties:
+        id:
+          type: string
+        sub_item:
+          $ref: '#/components/schemas/issuing.Authorization'
+
+    # issuing module also has Item class - will be renamed to Item_1
+    issuing.Item:
+      type: object
+      properties:
+        id:
+          type: string
+
+    issuing.Authorization:
+      type: object
+      properties:
+        id:
+          type: string
+        item:
+          $ref: '#/components/schemas/issuing.Item'
+        invoice:
+          $ref: '#/components/schemas/billing.Invoice'
+
+    # billing module also has Item class - will be renamed to Item_2
+    billing.Item:
+      type: object
+      properties:
+        id:
+          type: string
+
+    billing.Invoice:
+      type: object
+      properties:
+        id:
+          type: string
+        billing_item:
+          $ref: '#/components/schemas/billing.Item'
+        session:
+          $ref: '#/components/schemas/checkout.Session'
+
+    # checkout module also has Item class - will be renamed to Item_3
+    checkout.Item:
+      type: object
+      properties:
+        id:
+          type: string
+
+    checkout.Session:
+      type: object
+      properties:
+        id:
+          type: string
+        checkout_item:
+          $ref: '#/components/schemas/checkout.Item'
+        root_item:
+          $ref: '#/components/schemas/Item'
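As the comments above state, merging the cyclic modules brings three same-named `Item` classes into one namespace, so all but one must be renamed (`Item_1`, `Item_2`, ...). A compact sketch of the deduplication this implies; which class receives which suffix is the generator's choice:

```python
from __future__ import annotations

from typing import Optional

from pydantic import BaseModel


class Item(BaseModel):  # root-level Item keeps its name
    id: Optional[str] = None
    sub_item: Optional[Authorization] = None


class Item_1(BaseModel):  # issuing.Item after the merge-time rename
    id: Optional[str] = None


class Authorization(BaseModel):
    id: Optional[str] = None
    item: Optional[Item_1] = None
```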
diff -pruN 0.26.4-3/tests/data/openapi/circular_imports_different_prefixes.yaml 0.45.0-1/tests/data/openapi/circular_imports_different_prefixes.yaml
--- 0.26.4-3/tests/data/openapi/circular_imports_different_prefixes.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/circular_imports_different_prefixes.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,44 @@
+openapi: 3.0.0
+info:
+  title: Circular Import with Different Prefixes Test
+  version: 1.0.0
+
+paths: {}
+
+components:
+  schemas:
+    # Root model
+    Root:
+      type: object
+      properties:
+        id:
+          type: string
+        alpha_ref:
+          $ref: '#/components/schemas/alpha.sub.Model'
+
+    # alpha.sub namespace
+    alpha.sub.Model:
+      type: object
+      properties:
+        id:
+          type: string
+        beta_ref:
+          $ref: '#/components/schemas/beta.sub.Model'
+
+    # beta.sub namespace - different prefix from alpha.sub
+    beta.sub.Model:
+      type: object
+      properties:
+        id:
+          type: string
+        gamma_ref:
+          $ref: '#/components/schemas/gamma.sub.Model'
+
+    # gamma.sub namespace
+    gamma.sub.Model:
+      type: object
+      properties:
+        id:
+          type: string
+        root_ref:
+          $ref: '#/components/schemas/Root'
diff -pruN 0.26.4-3/tests/data/openapi/circular_imports_mixed_prefixes.yaml 0.45.0-1/tests/data/openapi/circular_imports_mixed_prefixes.yaml
--- 0.26.4-3/tests/data/openapi/circular_imports_mixed_prefixes.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/circular_imports_mixed_prefixes.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,53 @@
+openapi: 3.0.0
+info:
+  title: Circular Import with Mixed Prefixes Test
+  version: 1.0.0
+
+paths: {}
+
+components:
+  schemas:
+    # Root model to trigger _internal.py generation
+    Root:
+      type: object
+      properties:
+        id:
+          type: string
+        a_ref:
+          $ref: '#/components/schemas/common.a.Model'
+
+    # common.a namespace - first directory is 'common'
+    common.a.Model:
+      type: object
+      properties:
+        id:
+          type: string
+        b_ref:
+          $ref: '#/components/schemas/common.b.Model'
+
+    # common.b namespace - shares 'common' prefix
+    common.b.Model:
+      type: object
+      properties:
+        id:
+          type: string
+        c_ref:
+          $ref: '#/components/schemas/common.c.Model'
+
+    # common.c namespace - shares 'common' prefix
+    common.c.Model:
+      type: object
+      properties:
+        id:
+          type: string
+        other_ref:
+          $ref: '#/components/schemas/other.x.Model'
+
+    # other.x namespace - different prefix, will trigger break in LCP
+    other.x.Model:
+      type: object
+      properties:
+        id:
+          type: string
+        root_ref:
+          $ref: '#/components/schemas/Root'
diff -pruN 0.26.4-3/tests/data/openapi/circular_imports_small_cycle.yaml 0.45.0-1/tests/data/openapi/circular_imports_small_cycle.yaml
--- 0.26.4-3/tests/data/openapi/circular_imports_small_cycle.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/circular_imports_small_cycle.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,27 @@
+openapi: 3.0.0
+info:
+  title: Small Circular Dependency Test (Below Threshold)
+  version: 1.0.0
+
+paths: {}
+
+components:
+  schemas:
+    # Root model references submodule
+    Parent:
+      type: object
+      properties:
+        id:
+          type: string
+        child:
+          $ref: '#/components/schemas/sub.Child'
+
+    # sub.Child references back to root - creates 2-module cycle
+    # This WILL trigger _internal.py (all SCCs are merged)
+    sub.Child:
+      type: object
+      properties:
+        id:
+          type: string
+        parent:
+          $ref: '#/components/schemas/Parent'
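The comments in these cycle fixtures describe the strategy under test: build a module dependency graph, find strongly connected components, and merge each multi-module SCC (e.g. into an `_internal.py`) so Python never has to import two modules that need each other at import time. A minimal, self-contained illustration of that idea using Tarjan's algorithm; this is an explanatory sketch, not the generator's actual code:

```python
from typing import Dict, List


def strongly_connected_components(graph: Dict[str, List[str]]) -> List[List[str]]:
    """Tarjan's algorithm; every multi-node SCC is a candidate for merging."""
    index: Dict[str, int] = {}
    low: Dict[str, int] = {}
    on_stack: set = set()
    stack: List[str] = []
    sccs: List[List[str]] = []
    counter = 0

    def visit(node: str) -> None:
        nonlocal counter
        index[node] = low[node] = counter
        counter += 1
        stack.append(node)
        on_stack.add(node)
        for dep in graph.get(node, []):
            if dep not in index:
                visit(dep)
                low[node] = min(low[node], low[dep])
            elif dep in on_stack:
                low[node] = min(low[node], index[dep])
        if low[node] == index[node]:  # node is the root of an SCC
            scc = []
            while True:
                member = stack.pop()
                on_stack.discard(member)
                scc.append(member)
                if member == node:
                    break
            sccs.append(scc)

    for node in graph:
        if node not in index:
            visit(node)
    return sccs


# The two-module cycle from this fixture: root <-> sub.
modules = {"__init__": ["sub"], "sub": ["__init__"]}
print(strongly_connected_components(modules))  # [['sub', '__init__']] -> merge
```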
diff -pruN 0.26.4-3/tests/data/openapi/circular_imports_stripe_like.yaml 0.45.0-1/tests/data/openapi/circular_imports_stripe_like.yaml
--- 0.26.4-3/tests/data/openapi/circular_imports_stripe_like.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/circular_imports_stripe_like.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,78 @@
+openapi: 3.0.0
+info:
+  title: Stripe-like Circular Import Test
+  version: 1.0.0
+
+paths: {}
+
+components:
+  schemas:
+    # Models in root (will go to __init__.py)
+    BalanceTransaction:
+      type: object
+      properties:
+        id:
+          type: string
+        amount:
+          type: integer
+        source:
+          $ref: '#/components/schemas/issuing.Authorization'
+
+    # Models with dot notation (will go to issuing.py)
+    issuing.Authorization:
+      type: object
+      properties:
+        id:
+          type: string
+        invoice:
+          $ref: '#/components/schemas/billing.Invoice'
+        cardholder:
+          $ref: '#/components/schemas/issuing.Cardholder'
+
+    issuing.Cardholder:
+      type: object
+      properties:
+        id:
+          type: string
+        name:
+          type: string
+
+    # Models in billing module
+    billing.Invoice:
+      type: object
+      properties:
+        id:
+          type: string
+        session:
+          $ref: '#/components/schemas/checkout.Session'
+        subscription:
+          $ref: '#/components/schemas/billing.Subscription'
+
+    billing.Subscription:
+      type: object
+      properties:
+        id:
+          type: string
+        plan:
+          type: string
+
+    # Models in checkout module - completes the cycle back to root
+    checkout.Session:
+      type: object
+      properties:
+        id:
+          type: string
+        transaction:
+          $ref: '#/components/schemas/BalanceTransaction'
+        line_items:
+          type: array
+          items:
+            $ref: '#/components/schemas/checkout.LineItem'
+
+    checkout.LineItem:
+      type: object
+      properties:
+        id:
+          type: string
+        price:
+          type: integer
diff -pruN 0.26.4-3/tests/data/openapi/circular_imports_with_inheritance.yaml 0.45.0-1/tests/data/openapi/circular_imports_with_inheritance.yaml
--- 0.26.4-3/tests/data/openapi/circular_imports_with_inheritance.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/circular_imports_with_inheritance.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,59 @@
+openapi: 3.0.0
+info:
+  title: Circular Import with Inheritance Test
+  version: 1.0.0
+
+paths: {}
+
+components:
+  schemas:
+    # Base class in root module
+    BaseEntity:
+      type: object
+      properties:
+        id:
+          type: string
+        created_at:
+          type: string
+
+    # Root model that references submodule
+    RootModel:
+      type: object
+      properties:
+        id:
+          type: string
+        auth:
+          $ref: '#/components/schemas/issuing.Authorization'
+
+    # issuing.Authorization inherits from BaseEntity (creates base_class edge)
+    issuing.Authorization:
+      allOf:
+        - $ref: '#/components/schemas/BaseEntity'
+        - type: object
+          properties:
+            amount:
+              type: integer
+            invoice:
+              $ref: '#/components/schemas/billing.Invoice'
+
+    # billing.Invoice also inherits from BaseEntity
+    billing.Invoice:
+      allOf:
+        - $ref: '#/components/schemas/BaseEntity'
+        - type: object
+          properties:
+            total:
+              type: integer
+            session:
+              $ref: '#/components/schemas/checkout.Session'
+
+    # checkout.Session inherits from BaseEntity and references back to root
+    checkout.Session:
+      allOf:
+        - $ref: '#/components/schemas/BaseEntity'
+        - type: object
+          properties:
+            status:
+              type: string
+            root_ref:
+              $ref: '#/components/schemas/RootModel'
diff -pruN 0.26.4-3/tests/data/openapi/complex_reference.json 0.45.0-1/tests/data/openapi/complex_reference.json
--- 0.26.4-3/tests/data/openapi/complex_reference.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/complex_reference.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,58 @@
+{
+    "openapi": "3.0.0",
+    "components": {
+        "schemas": {
+            "A": {
+                "properties": {
+                    "a_property": {
+                        "$ref": "#/components/schemas/B1"
+                    }
+                },
+                "type": "object"
+            },
+            "B1": {
+                "allOf": [
+                    {
+                        "$ref": "#/components/schemas/A"
+                    }
+                ],
+                "type": "object"
+            },
+            "C1": {
+                "allOf": [
+                    {
+                        "$ref": "#/components/schemas/B1"
+                    }
+                ],
+                "type": "object"
+            },
+            "B2": {
+                "allOf": [
+                    {
+                        "$ref": "#/components/schemas/A"
+                    }
+                ],
+                "type": "object"
+            },
+            "D1": {
+                "allOf": [
+                    {
+                        "$ref": "#/components/schemas/C1"
+                    }
+                ],
+                "type": "object"
+            },
+            "D1andB2": {
+                "allOf": [
+                    {
+                        "$ref": "#/components/schemas/D1"
+                    },
+                    {
+                        "$ref": "#/components/schemas/B2"
+                    }
+                ],
+                "type": "object"
+            }
+        }
+    }
+}
\ No newline at end of file
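The `allOf` chains here translate into plain class inheritance, including the diamond at the end where `D1andB2` inherits from both `D1` and `B2`. A sketch of the class graph the generator is expected to build; the deferred annotations import is needed because `A` refers to `B1` before it exists:

```python
from __future__ import annotations

from typing import Optional

from pydantic import BaseModel


class A(BaseModel):
    a_property: Optional[B1] = None  # forward reference: B1 is defined below


class B1(A):
    pass


class C1(B1):
    pass


class B2(A):
    pass


class D1(C1):
    pass


class D1andB2(D1, B2):
    # Multiple inheritance mirrors the two-element allOf.
    pass
```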
diff -pruN 0.26.4-3/tests/data/openapi/const.json 0.45.0-1/tests/data/openapi/const.json
--- 0.26.4-3/tests/data/openapi/const.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/const.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+{
+  "components": {
+    "schemas": {
+      "Namespace": {
+        "type": "object",
+        "required": [
+          "apiVersion",
+          "kind"
+        ],
+        "properties": {
+          "apiVersion": {
+            "const": "v1"
+          },
+          "kind": {
+            "const": "Namespace"
+          }
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/const.yaml 0.45.0-1/tests/data/openapi/const.yaml
--- 0.26.4-3/tests/data/openapi/const.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/const.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+openapi: '3.0.2'
+components:
+  schemas:
+    ApiVersion:
+      description: The version of this API
+      type: string
+      const: v1
+    Api:
+      type: object
+      required:
+        - version
+      properties:
+        version:
+          $ref: "#/components/schemas/ApiVersion"
\ No newline at end of file
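Both `const` fixtures check the usual mapping of `const` to a single-value `typing.Literal`, pre-filled as the default. A sketch for `const.yaml`:

```python
from typing import Literal

from pydantic import BaseModel


class Api(BaseModel):
    # const: v1 -> a single-value Literal, pre-filled as the default
    version: Literal["v1"] = "v1"
```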
diff -pruN 0.26.4-3/tests/data/openapi/content_in_parameters.yaml 0.45.0-1/tests/data/openapi/content_in_parameters.yaml
--- 0.26.4-3/tests/data/openapi/content_in_parameters.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/content_in_parameters.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,73 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+  description: |
+    This description is for testing
+    multi-line
+    description
+
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /{supiOrSuci}/security-information-rg:
+    get:
+      summary: Get authentication data for the FN-RG
+      operationId: GetRgAuthData
+      tags:
+        - Get Auth Data for FN-RG
+      responses:
+        '200':
+          description: Null response
+      parameters:
+        - name: plmn-id
+          in: query
+          description: serving PLMN ID
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/Pet'
+components:
+  parameters:
+    MyParam:
+      name: foo
+      in: query
+      schema:
+        type: string
+  securitySchemes:
+    BearerAuth:
+      type: http
+      scheme: bearer
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    PetForm:
+      title: PetForm
+      type: object
+      properties:
+        name:
+          type: string
+        age:
+          type: integer
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/custom_id.yaml 0.45.0-1/tests/data/openapi/custom_id.yaml
--- 0.26.4-3/tests/data/openapi/custom_id.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/custom_id.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+openapi: 3.0.0
+components:
+  schemas:
+    CustomId:
+      description: My custom ID
+      type: string
+      format: uuid
+    Model:
+      type: object
+      properties:
+        custom_id:
+          $ref: "#/components/schemas/CustomId"
\ No newline at end of file
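`format: uuid` on the referenced `CustomId` schema normally surfaces as `uuid.UUID`. A sketch; whether `CustomId` survives as a named alias depends on generator options:

```python
from typing import Optional
from uuid import UUID

from pydantic import BaseModel


class Model(BaseModel):
    # CustomId: type string + format uuid -> uuid.UUID
    custom_id: Optional[UUID] = None
```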
diff -pruN 0.26.4-3/tests/data/openapi/dataclass_inheritance_field_ordering.yaml 0.45.0-1/tests/data/openapi/dataclass_inheritance_field_ordering.yaml
--- 0.26.4-3/tests/data/openapi/dataclass_inheritance_field_ordering.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/dataclass_inheritance_field_ordering.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+openapi: "3.0.0"
+info:
+  title: Dataclass Inheritance Field Ordering Test
+  version: "1.0"
+components:
+  schemas:
+    ParentWithDefault:
+      type: object
+      properties:
+        name:
+          type: string
+          default: "default_name"
+        read_only_field:
+          type: string
+          readOnly: true
+
+    ChildWithRequired:
+      allOf:
+        - $ref: '#/components/schemas/ParentWithDefault'
+        - type: object
+          properties:
+            child_id:
+              type: string
+          required:
+            - child_id
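This fixture pins down a stdlib-dataclass pitfall rather than a pydantic one: when a parent contributes only defaulted fields and the child adds a required one, naive inheritance is illegal, because non-default fields cannot follow default ones in the synthesized `__init__`. A minimal reproduction plus the usual `kw_only` escape hatch (Python 3.10+); how the generator actually reorders or marks fields is what the test verifies:

```python
from dataclasses import dataclass
from typing import Optional


@dataclass
class ParentWithDefault:
    name: str = "default_name"
    read_only_field: Optional[str] = None


# A plain child raises at class-creation time:
#   TypeError: non-default argument 'child_id' follows default argument
#
# @dataclass
# class ChildWithRequired(ParentWithDefault):
#     child_id: str


@dataclass(kw_only=True)  # keyword-only fields sidestep the ordering rule
class ChildWithRequired(ParentWithDefault):
    child_id: str
```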
diff -pruN 0.26.4-3/tests/data/openapi/datetime.yaml 0.45.0-1/tests/data/openapi/datetime.yaml
--- 0.26.4-3/tests/data/openapi/datetime.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/datetime.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    InventoryItem:
+      required:
+#      - id
+#      - name
+      - releaseDate
+      type: object
+      properties:
+#        id:
+#          type: string
+#          format: uuid
+#          example: d290f1ee-6c54-4b01-90e6-d701748f0851
+#        name:
+#          type: string
+#          example: Widget Adapter
+        releaseDate:
+          type: string
+          format: date-time
+          example: 2016-08-29T09:12:33.001Z
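`format: date-time` fields map to `datetime.datetime` (the example value above is an RFC 3339 timestamp). A sketch of the resulting model:

```python
from datetime import datetime

from pydantic import BaseModel


class InventoryItem(BaseModel):
    # format: date-time -> datetime; pydantic parses RFC 3339 strings
    releaseDate: datetime
```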
diff -pruN 0.26.4-3/tests/data/openapi/default_object.yaml 0.45.0-1/tests/data/openapi/default_object.yaml
--- 0.26.4-3/tests/data/openapi/default_object.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/default_object.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,81 @@
+openapi: 3.0.3
+info:
+  title: Example
+  version: 0.1.0
+servers:
+- url: http://example.com
+paths:
+  /foo:
+    delete:
+      responses:
+        '200':
+          description: OK
+components:
+  schemas:
+    Foo:
+      type: object
+      properties:
+        text:
+          type: string
+          default: "987"
+        number:
+          type: number
+    Bar:
+      type: object
+      properties:
+        foo:
+          allOf:
+          - $ref: '#/components/schemas/Foo'
+          default:
+            text: abc
+            number: 123
+        baz:
+          type: array
+          items:
+            $ref: '#/components/schemas/Foo'
+          default:
+            - text: abc
+              number: 123
+            - text: efg
+              number: 456
+    Nested.Foo:
+      type: string
+    Nested.Bar:
+      type: object
+      properties:
+        foo:
+          allOf:
+            - $ref: '#/components/schemas/Foo'
+          default:
+            text: abc
+            number: 123
+        baz:
+          type: array
+          items:
+            $ref: '#/components/schemas/Foo'
+          default:
+            - text: abc
+              number: 123
+            - text: efg
+              number: 456
+        nested_foo:
+          $ref: '#/components/schemas/Nested.Foo'
+          default: 'default foo'
+    Another.Foo:
+      type: string
+    Another.Bar:
+      type: object
+      properties:
+        original_foo:
+          allOf:
+            - $ref: '#/components/schemas/Foo'
+          default:
+            text: abc
+            number: 123
+        nested_foo:
+          type: array
+          items:
+            $ref: '#/components/schemas/Nested.Foo'
+          default:
+            - abc
+            - efg
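
For reference, one plausible pydantic rendering of the `Foo`/`Bar` pair above; a hand-written sketch, not a claim about the generator's exact output. pydantic deep-copies field defaults per instance, so the mutable-looking defaults below are safe:

from typing import List, Optional
from pydantic import BaseModel

class Foo(BaseModel):
    text: str = "987"
    number: Optional[float] = None

class Bar(BaseModel):
    foo: Foo = Foo(text="abc", number=123)
    baz: List[Foo] = [Foo(text="abc", number=123), Foo(text="efg", number=456)]
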
diff -pruN 0.26.4-3/tests/data/openapi/definitions.yaml 0.45.0-1/tests/data/openapi/definitions.yaml
--- 0.26.4-3/tests/data/openapi/definitions.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/definitions.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,43 @@
+openapi: "3.0.0"
+schemas:
+  Problem:
+    properties:
+      detail:
+        description: |
+          A human readable explanation specific to this occurrence of the
+          problem. You MUST NOT expose internal information, personal
+          data or implementation details through this field.
+        example: Request took too long to complete.
+        type: string
+      instance:
+        description: |
+          An absolute URI that identifies the specific occurrence of the problem.
+          It may or may not yield further information if dereferenced.
+        format: uri
+        type: string
+      status:
+        description: |
+          The HTTP status code generated by the origin server for this occurrence
+          of the problem.
+        example: 503
+        exclusiveMaximum: true
+        format: int32
+        maximum: 600
+        minimum: 100
+        type: integer
+      title:
+        description: |
+          A short summary of the problem type. Written in English and readable
+          by engineers (usually not suited for non-technical stakeholders and
+          not localized); example: Service Unavailable
+        type: string
+      type:
+        default: about:blank
+        description: |
+          An absolute URI that identifies the problem type.  When dereferenced,
+          it SHOULD provide human-readable documentation for the problem type
+          (e.g., using HTML).
+        example: https://tools.ietf.org/html/rfc7231#section-6.6.4
+        format: uri
+        type: string
+    type: object
diff -pruN 0.26.4-3/tests/data/openapi/discriminator.yaml 0.45.0-1/tests/data/openapi/discriminator.yaml
--- 0.26.4-3/tests/data/openapi/discriminator.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/discriminator.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,42 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    ObjectBase:
+      description: Object schema
+      type: object
+      properties:
+        name:
+          description: Name of the object
+          type: string
+        type:
+          description: Object type
+          type: string
+          enum:
+            - my_first_object
+            - my_second_object
+            - my_third_object
+    CreateObjectRequest:
+      description: Request schema for object creation
+      type: object
+      allOf:
+        - $ref: '#/components/schemas/ObjectBase'
+      required:
+        - name
+        - type
+    UpdateObjectRequest:
+      description: Request schema for object updates
+      type: object
+      allOf:
+        - $ref: '#/components/schemas/ObjectBase'
+    Demo:
+        oneOf:
+          - $ref: "#/components/schemas/ObjectBase"
+          - $ref: "#/components/schemas/CreateObjectRequest"
+          - $ref: "#/components/schemas/UpdateObjectRequest"
+        discriminator:
+          propertyName: type
+          mapping:
+            type1: "#/components/schemas/ObjectBase"
+            type2: "#/components/schemas/CreateObjectRequest"
+            type3: "#/components/schemas/UpdateObjectRequest"
+
diff -pruN 0.26.4-3/tests/data/openapi/discriminator_allof.yaml 0.45.0-1/tests/data/openapi/discriminator_allof.yaml
--- 0.26.4-3/tests/data/openapi/discriminator_allof.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/discriminator_allof.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,62 @@
+# Example from https://spec.openapis.org/oas/v3.1.1.html#examples-1
+# This tests discriminator without oneOf/anyOf, where subtypes use allOf
+
+openapi: 3.1.0
+info:
+  title: Test
+  description: "Test API"
+  version: 0.0.0
+paths:
+  /pet:
+    get:
+      responses:
+        '200':
+          description: "Pet"
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/PetContainer"
+components:
+  schemas:
+    PetContainer:
+      type: object
+      required:
+        - pet
+      properties:
+        pet:
+          $ref: "#/components/schemas/Pet"
+    Pet:
+      type: object
+      required:
+        - petType
+      properties:
+        petType:
+          type: string
+      discriminator:
+        propertyName: petType
+        mapping:
+          cat: "#/components/schemas/Cat"
+          dog: "#/components/schemas/Dog"
+          lizard: "#/components/schemas/Lizard"
+    Cat:
+      allOf:
+        - $ref: "#/components/schemas/Pet"
+        - type: object
+          properties:
+            name:
+              type: string
+    Dog:
+      allOf:
+        - $ref: "#/components/schemas/Pet"
+        - type: object
+          properties:
+            bark:
+              type: string
+    Lizard:
+      allOf:
+        - $ref: "#/components/schemas/Pet"
+        - type: object
+          properties:
+            lovesRocks:
+              type: boolean
+
diff -pruN 0.26.4-3/tests/data/openapi/discriminator_allof_no_subtypes.yaml 0.45.0-1/tests/data/openapi/discriminator_allof_no_subtypes.yaml
--- 0.26.4-3/tests/data/openapi/discriminator_allof_no_subtypes.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/discriminator_allof_no_subtypes.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,50 @@
+# Tests discriminator without any allOf subtypes
+# This tests the edge case where a schema has a discriminator but nothing inherits from it
+
+openapi: 3.1.0
+info:
+  title: Test
+  description: "Test API"
+  version: 0.0.0
+paths:
+  /item:
+    get:
+      responses:
+        '200':
+          description: "Item"
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/ItemContainer"
+components:
+  schemas:
+    ItemContainer:
+      type: object
+      required:
+        - item
+      properties:
+        item:
+          $ref: "#/components/schemas/BaseItem"
+    BaseItem:
+      type: object
+      required:
+        - itemType
+      properties:
+        itemType:
+          type: string
+      discriminator:
+        propertyName: itemType
+        mapping:
+          foo: "#/components/schemas/FooItem"
+          bar: "#/components/schemas/BarItem"
+    # These schemas exist but don't use allOf to inherit from BaseItem
+    FooItem:
+      type: object
+      properties:
+        fooValue:
+          type: string
+    BarItem:
+      type: object
+      properties:
+        barValue:
+          type: integer
diff -pruN 0.26.4-3/tests/data/openapi/discriminator_enum.yaml 0.45.0-1/tests/data/openapi/discriminator_enum.yaml
--- 0.26.4-3/tests/data/openapi/discriminator_enum.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/discriminator_enum.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,40 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    Request:
+      oneOf:
+        - $ref: '#/components/schemas/RequestV1'
+        - $ref: '#/components/schemas/RequestV2'
+      discriminator:
+        propertyName: version
+        mapping:
+          v1: '#/components/schemas/RequestV1'
+          v2: '#/components/schemas/RequestV2'
+
+    RequestVersionEnum:
+      type: string
+      description: this is not included!
+      title: no title!
+      enum:
+        - v1
+        - v2
+    RequestBase:
+      properties:
+        version:
+          $ref: '#/components/schemas/RequestVersionEnum'
+      required:
+        - version
+
+    RequestV1:
+      allOf:
+        - $ref: '#/components/schemas/RequestBase'
+      properties:
+        request_id:
+          type: string
+          title: test title
+          description: there is description
+      required:
+        - request_id
+    RequestV2:
+      allOf:
+        - $ref: '#/components/schemas/RequestBase'
diff -pruN 0.26.4-3/tests/data/openapi/discriminator_enum_duplicate.yaml 0.45.0-1/tests/data/openapi/discriminator_enum_duplicate.yaml
--- 0.26.4-3/tests/data/openapi/discriminator_enum_duplicate.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/discriminator_enum_duplicate.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,64 @@
+# Example from https://docs.pydantic.dev/latest/concepts/unions/#discriminated-unions
+openapi: 3.1.0
+components:
+  schemas:
+    Cat:
+      properties:
+        pet_type:
+          const: "cat"
+          title: "Pet Type"
+        meows:
+          title: Meows
+          type: integer
+      required:
+        - pet_type
+        - meows
+      title: Cat
+      type: object
+    Dog:
+      properties:
+        pet_type:
+          const: "dog"
+          title: "Pet Type"
+        barks:
+          title: Barks
+          type: number
+      required:
+        - pet_type
+        - barks
+      title: Dog
+      type: object
+    Lizard:
+      properties:
+        pet_type:
+          enum:
+            - reptile
+            - lizard
+          title: Pet Type
+          type: string
+        scales:
+          title: Scales
+          type: boolean
+      required:
+        - pet_type
+        - scales
+      title: Lizard
+      type: object
+    Animal:
+      properties:
+        pet:
+          discriminator:
+            mapping:
+              cat: '#/components/schemas/Cat'
+              dog: '#/components/schemas/Dog'
+              lizard: '#/components/schemas/Lizard'
+              reptile: '#/components/schemas/Lizard'
+            propertyName: pet_type
+          oneOf:
+            - $ref: '#/components/schemas/Cat'
+            - $ref: '#/components/schemas/Dog'
+            - $ref: '#/components/schemas/Lizard'
+          title: Pet
+        'n':
+          title: 'N'
+          type: integer
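
This fixture mirrors the discriminated-union example linked in the comment above, so the hand-written target shape is known from the pydantic docs; a sketch for orientation, not the generator's verbatim output. Note how the duplicate mapping keys `lizard`/`reptile` collapse into a single two-valued Literal tag:

from typing import Literal, Optional, Union
from pydantic import BaseModel, Field

class Cat(BaseModel):
    pet_type: Literal["cat"]
    meows: int

class Dog(BaseModel):
    pet_type: Literal["dog"]
    barks: float

class Lizard(BaseModel):
    pet_type: Literal["reptile", "lizard"]
    scales: bool

class Animal(BaseModel):
    # discriminated union keyed on pet_type; field is optional per the schema
    pet: Optional[Union[Cat, Dog, Lizard]] = Field(None, discriminator="pet_type")
    n: Optional[int] = None
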
diff -pruN 0.26.4-3/tests/data/openapi/discriminator_enum_sanitized.yaml 0.45.0-1/tests/data/openapi/discriminator_enum_sanitized.yaml
--- 0.26.4-3/tests/data/openapi/discriminator_enum_sanitized.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/discriminator_enum_sanitized.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,36 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    Request:
+      oneOf:
+        - $ref: '#/components/schemas/RequestV1'
+        - $ref: '#/components/schemas/RequestV2'
+      discriminator:
+        propertyName: version
+        mapping:
+          v1-beta: '#/components/schemas/RequestV1'
+          v2: '#/components/schemas/RequestV2'
+
+    RequestVersionEnum:
+      type: string
+      enum:
+        - v1-beta
+        - v2
+    RequestBase:
+      properties:
+        version:
+          $ref: '#/components/schemas/RequestVersionEnum'
+      required:
+        - version
+
+    RequestV1:
+      allOf:
+        - $ref: '#/components/schemas/RequestBase'
+      properties:
+        request_id:
+          type: string
+      required:
+        - request_id
+    RequestV2:
+      allOf:
+        - $ref: '#/components/schemas/RequestBase'
diff -pruN 0.26.4-3/tests/data/openapi/discriminator_enum_single_value.yaml 0.45.0-1/tests/data/openapi/discriminator_enum_single_value.yaml
--- 0.26.4-3/tests/data/openapi/discriminator_enum_single_value.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/discriminator_enum_single_value.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,31 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    ToolCall:
+      oneOf:
+        - $ref: '#/components/schemas/FunctionToolCall'
+      discriminator:
+        propertyName: type
+
+    ToolType:
+      type: string
+      enum:
+        - function
+
+    ToolBase:
+      type: object
+      properties:
+        type:
+          $ref: '#/components/schemas/ToolType'
+      required:
+        - type
+
+    FunctionToolCall:
+      allOf:
+        - $ref: '#/components/schemas/ToolBase'
+      type: object
+      properties:
+        id:
+          type: string
+      required:
+        - id
diff -pruN 0.26.4-3/tests/data/openapi/discriminator_enum_single_value_anyof.yaml 0.45.0-1/tests/data/openapi/discriminator_enum_single_value_anyof.yaml
--- 0.26.4-3/tests/data/openapi/discriminator_enum_single_value_anyof.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/discriminator_enum_single_value_anyof.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,31 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    ToolCallUnion:
+      anyOf:
+        - $ref: '#/components/schemas/FunctionToolCall'
+        - $ref: '#/components/schemas/CustomToolCall'
+      discriminator:
+        propertyName: type
+
+    ToolType:
+      type: string
+      enum:
+        - function
+
+    FunctionToolCall:
+      type: object
+      properties:
+        id:
+          type: string
+        type:
+          $ref: '#/components/schemas/ToolType'
+      required:
+        - id
+        - type
+
+    CustomToolCall:
+      type: object
+      properties:
+        type:
+          type: string
diff -pruN 0.26.4-3/tests/data/openapi/discriminator_in_array_anyof.yaml 0.45.0-1/tests/data/openapi/discriminator_in_array_anyof.yaml
--- 0.26.4-3/tests/data/openapi/discriminator_in_array_anyof.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/discriminator_in_array_anyof.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,48 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    ObjectBase:
+      description: Object schema
+      type: object
+      properties:
+        name:
+          description: Name of the object
+          type: string
+        type:
+          description: Object type
+          type: string
+          enum:
+            - my_first_object
+            - my_second_object
+    CreateObjectRequest:
+      description: Request schema for object creation
+      type: object
+      allOf:
+        - $ref: '#/components/schemas/ObjectBase'
+      required:
+        - name
+        - type
+    UpdateObjectRequest:
+      description: Request schema for object updates
+      type: object
+      allOf:
+        - $ref: '#/components/schemas/ObjectBase'
+    Demo:
+      type: object
+      required:
+        - myArray
+      properties:
+        myArray:
+          type: array
+          items:
+            oneOf:
+              - $ref: "#/components/schemas/ObjectBase"
+              - $ref: "#/components/schemas/CreateObjectRequest"
+              - $ref: "#/components/schemas/UpdateObjectRequest"
+            discriminator:
+              propertyName: type
+              mapping:
+                type1: "#/components/schemas/ObjectBase"
+                type2: "#/components/schemas/CreateObjectRequest"
+                type3: "#/components/schemas/UpdateObjectRequest"
+
diff -pruN 0.26.4-3/tests/data/openapi/discriminator_in_array_oneof.yaml 0.45.0-1/tests/data/openapi/discriminator_in_array_oneof.yaml
--- 0.26.4-3/tests/data/openapi/discriminator_in_array_oneof.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/discriminator_in_array_oneof.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,48 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    ObjectBase:
+      description: Object schema
+      type: object
+      properties:
+        name:
+          description: Name of the object
+          type: string
+        type:
+          description: Object type
+          type: string
+          enum:
+            - my_first_object
+            - my_second_object
+    CreateObjectRequest:
+      description: Request schema for object creation
+      type: object
+      allOf:
+        - $ref: '#/components/schemas/ObjectBase'
+      required:
+        - name
+        - type
+    UpdateObjectRequest:
+      description: Request schema for object updates
+      type: object
+      allOf:
+        - $ref: '#/components/schemas/ObjectBase'
+    Demo:
+      type: object
+      required:
+        - myArray
+      properties:
+        myArray:
+          type: array
+          items:
+            anyOf:
+              - $ref: "#/components/schemas/ObjectBase"
+              - $ref: "#/components/schemas/CreateObjectRequest"
+              - $ref: "#/components/schemas/UpdateObjectRequest"
+            discriminator:
+              propertyName: type
+              mapping:
+                type1: "#/components/schemas/ObjectBase"
+                type2: "#/components/schemas/CreateObjectRequest"
+                type3: "#/components/schemas/UpdateObjectRequest"
+
diff -pruN 0.26.4-3/tests/data/openapi/discriminator_with_properties.yaml 0.45.0-1/tests/data/openapi/discriminator_with_properties.yaml
--- 0.26.4-3/tests/data/openapi/discriminator_with_properties.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/discriminator_with_properties.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,46 @@
+openapi: 3.0.1
+components:
+  schemas:
+    CustomContextVariable:
+      oneOf:
+        - $ref: '#/components/schemas/UserContextVariable'
+        - $ref: '#/components/schemas/IssueContextVariable'
+      properties:
+        "@type":
+          description: Type of custom context variable.
+          type: string
+      discriminator:
+        mapping:
+          user: '#/components/schemas/UserContextVariable'
+          issue: '#/components/schemas/IssueContextVariable'
+        propertyName: "@type"
+      required:
+        - "@type"
+      type: object
+    UserContextVariable:
+      properties:
+        accountId:
+          description: The account ID of the user.
+          type: string
+        "@type":
+          description: Type of custom context variable.
+          type: string
+      required:
+        - accountId
+        - "@type"
+      type: object
+    IssueContextVariable:
+      properties:
+        id:
+          description: The issue ID.
+          format: int64
+          type: integer
+        key:
+          description: The issue key.
+          type: string
+        "@type":
+          description: Type of custom context variable.
+          type: string
+      required:
+        - "@type"
+      type: object
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/discriminator_without_mapping.yaml 0.45.0-1/tests/data/openapi/discriminator_without_mapping.yaml
--- 0.26.4-3/tests/data/openapi/discriminator_without_mapping.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/discriminator_without_mapping.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,38 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    ObjectBase:
+      description: Object schema
+      type: object
+      properties:
+        name:
+          description: Name of the object
+          type: string
+        type:
+          description: Object type
+          type: string
+          enum:
+            - my_first_object
+            - my_second_object
+            - my_third_object
+    CreateObjectRequest:
+      description: Request schema for object creation
+      type: object
+      allOf:
+        - $ref: '#/components/schemas/ObjectBase'
+      required:
+        - name
+        - type
+    UpdateObjectRequest:
+      description: Request schema for object updates
+      type: object
+      allOf:
+        - $ref: '#/components/schemas/ObjectBase'
+    Demo:
+        oneOf:
+          - $ref: "#/components/schemas/ObjectBase"
+          - $ref: "#/components/schemas/CreateObjectRequest"
+          - $ref: "#/components/schemas/UpdateObjectRequest"
+        discriminator:
+          propertyName: type
+
diff -pruN 0.26.4-3/tests/data/openapi/dot_notation_deep_inheritance.yaml 0.45.0-1/tests/data/openapi/dot_notation_deep_inheritance.yaml
--- 0.26.4-3/tests/data/openapi/dot_notation_deep_inheritance.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/dot_notation_deep_inheritance.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,29 @@
+openapi: "3.0.0"
+info:
+  title: Test API with deep inheritance from ancestor packages
+  version: "1.0.0"
+paths: {}
+components:
+  schemas:
+    v0.Animal:
+      type: object
+      description: Base animal in root package
+      properties:
+        species:
+          type: string
+    v0.mammal.Dog:
+      description: Dog inheriting from Animal (2 levels up)
+      allOf:
+        - $ref: '#/components/schemas/v0.Animal'
+        - type: object
+          properties:
+            breed:
+              type: string
+    v0.mammal.canine.Puppy:
+      description: Puppy inheriting from Animal (3 levels up)
+      allOf:
+        - $ref: '#/components/schemas/v0.Animal'
+        - type: object
+          properties:
+            age_weeks:
+              type: integer
diff -pruN 0.26.4-3/tests/data/openapi/dot_notation_inheritance.yaml 0.45.0-1/tests/data/openapi/dot_notation_inheritance.yaml
--- 0.26.4-3/tests/data/openapi/dot_notation_inheritance.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/dot_notation_inheritance.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,29 @@
+openapi: "3.0.0"
+info:
+  title: Test API with dot notation schema names and inheritance
+  version: "1.0.0"
+paths: {}
+components:
+  schemas:
+    v0.properties:
+      type: object
+      description: Base properties schema
+      properties:
+        name:
+          type: string
+    v0.animal:
+      type: object
+      description: Animal schema that references v0.properties
+      properties:
+        species:
+          type: string
+        props:
+          $ref: '#/components/schemas/v0.properties'
+    v0.animal.dog:
+      description: Dog schema that extends v0.animal
+      allOf:
+        - $ref: '#/components/schemas/v0.animal'
+        - type: object
+          properties:
+            breed:
+              type: string
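
Dotted schema names such as `v0.animal.dog` are commonly mapped to a package/module layout when output is a directory; under that assumption, a sketch of the flattened classes this fixture asks for (hand-written, names simplified from the dotted originals):

from typing import Optional
from pydantic import BaseModel

class Properties(BaseModel):       # v0.properties
    name: Optional[str] = None

class Animal(BaseModel):           # v0.animal
    species: Optional[str] = None
    props: Optional[Properties] = None

class Dog(Animal):                 # v0.animal.dog: allOf -> subclassing
    breed: Optional[str] = None
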
diff -pruN 0.26.4-3/tests/data/openapi/duplicate_model_simplify.yaml 0.45.0-1/tests/data/openapi/duplicate_model_simplify.yaml
--- 0.26.4-3/tests/data/openapi/duplicate_model_simplify.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/duplicate_model_simplify.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+openapi: "3.0.3"
+info:
+  version: 1.0.0
+  title: T
+paths: {}
+components:
+  schemas:
+    M:
+      properties:
+        name: {"type": "string"}
+    m:
+      properties:
+        name: {"type": "string"}
+    R:
+      allOf:
+        - {"$ref":  "#/components/schemas/m"}
+        - {"$ref":  "#/components/schemas/M"}
diff -pruN 0.26.4-3/tests/data/openapi/duplicate_models.yaml 0.45.0-1/tests/data/openapi/duplicate_models.yaml
--- 0.26.4-3/tests/data/openapi/duplicate_models.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/duplicate_models.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,113 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+components:
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    Pets:
+      type: array
+      items:
+        $ref: "#/components/schemas/Pet"
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    Event:
+      type: object
+      properties:
+        name:
+          type: string
+    Result:
+        type: object
+        properties:
+          event:
+            $ref: '#/components/schemas/Event'
+    Events:
+        type: array
+        items:
+          $ref: '#/components/schemas/Event'
+    EventRoot:
+        $ref: '#/components/schemas/Event'
+    EventObject:
+        type: object
+        properties:
+          event:
+            $ref: '#/components/schemas/Event'
+    DuplicateObject1:
+        type: object
+        properties:
+          event:
+            type: array
+            items:
+              $ref: '#/components/schemas/Event'
+    DuplicateObject2:
+        type: object
+        properties:
+          event:
+            type: object
+            properties:
+              event:
+                $ref: '#/components/schemas/Event'
+    DuplicateObject3:
+        $ref: '#/components/schemas/Event'
diff -pruN 0.26.4-3/tests/data/openapi/duplicate_models2.yaml 0.45.0-1/tests/data/openapi/duplicate_models2.yaml
--- 0.26.4-3/tests/data/openapi/duplicate_models2.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/duplicate_models2.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,72 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: Get pet
+      operationId: getPets
+      responses:
+        '200':
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pet"
+  /cars:
+    get:
+      summary: Get car
+      operationId: getCar
+      responses:
+        '200':
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Cars"
+
+components:
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+        - type
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+        type:
+          type: string
+          enum: [ 'pet' ]
+        details:
+          type: object
+          properties:
+            race: { type: string }
+    Car:
+      required:
+        - id
+        - name
+        - type
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+        type:
+          type: string
+          enum: [ 'car' ]
+        details:
+          type: object
+          properties:
+            brand: { type: string }
diff -pruN 0.26.4-3/tests/data/openapi/empty_aliases.json 0.45.0-1/tests/data/openapi/empty_aliases.json
--- 0.26.4-3/tests/data/openapi/empty_aliases.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/empty_aliases.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1 @@
+{}
diff -pruN 0.26.4-3/tests/data/openapi/empty_data.json 0.45.0-1/tests/data/openapi/empty_data.json
--- 0.26.4-3/tests/data/openapi/empty_data.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/empty_data.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1 @@
+{}
diff -pruN 0.26.4-3/tests/data/openapi/empty_dict_default.yaml 0.45.0-1/tests/data/openapi/empty_dict_default.yaml
--- 0.26.4-3/tests/data/openapi/empty_dict_default.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/empty_dict_default.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,28 @@
+openapi: 3.1.0
+info:
+  title: Empty Dict Default Test
+  version: 0.1.0
+servers:
+- url: http://example.com
+paths:
+  /test:
+    get:
+      responses:
+        '200':
+          description: OK
+components:
+  schemas:
+    ObjectMeta:
+      type: object
+      properties:
+        name:
+          type: string
+        namespace:
+          type: string
+    PodSpec:
+      type: object
+      properties:
+        metadata:
+          allOf:
+            - $ref: '#/components/schemas/ObjectMeta'
+          default: {}
diff -pruN 0.26.4-3/tests/data/openapi/empty_list_default.yaml 0.45.0-1/tests/data/openapi/empty_list_default.yaml
--- 0.26.4-3/tests/data/openapi/empty_list_default.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/empty_list_default.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,27 @@
+openapi: 3.1.0
+info:
+  title: Empty List Default Test
+  version: 0.1.0
+servers:
+- url: http://example.com
+paths:
+  /test:
+    get:
+      responses:
+        '200':
+          description: OK
+components:
+  schemas:
+    Container:
+      type: object
+      properties:
+        name:
+          type: string
+    PodSpec:
+      type: object
+      properties:
+        containers:
+          type: array
+          items:
+            $ref: '#/components/schemas/Container'
+          default: []
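
The two fixtures above pin down `default: {}` and `default: []` on composed fields. A hand-written pydantic sketch (both models merged here for brevity); pydantic deep-copies defaults per instance, whereas plain dataclasses would need `field(default_factory=...)`:

from typing import List, Optional
from pydantic import BaseModel

class ObjectMeta(BaseModel):
    name: Optional[str] = None
    namespace: Optional[str] = None

class Container(BaseModel):
    name: Optional[str] = None

class PodSpec(BaseModel):
    metadata: ObjectMeta = ObjectMeta()   # default: {} on the allOf field
    containers: List[Container] = []      # default: [] on the array field
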
diff -pruN 0.26.4-3/tests/data/openapi/enum_builtin_conflict.yaml 0.45.0-1/tests/data/openapi/enum_builtin_conflict.yaml
--- 0.26.4-3/tests/data/openapi/enum_builtin_conflict.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/enum_builtin_conflict.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+openapi: "3.0.0"
+info:
+  title: Test API
+  version: "1.0.0"
+paths: {}
+components:
+  schemas:
+    StringMethodEnum:
+      type: string
+      enum:
+        - count
+        - index
+        - format
+        - normal_value
diff -pruN 0.26.4-3/tests/data/openapi/enum_models.yaml 0.45.0-1/tests/data/openapi/enum_models.yaml
--- 0.26.4-3/tests/data/openapi/enum_models.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/enum_models.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,154 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+components:
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+        - number
+        - boolean
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+        kind:
+          type: string
+          enum: ['dog', 'cat']
+        type:
+          type: string
+          enum: [ 'animal' ]
+        number:
+          type: integer
+          enum: [ 1 ]
+        boolean:
+          type: boolean
+          enum: [ true ]
+
+    Pets:
+      type: array
+      items:
+        $ref: "#/components/schemas/Pet"
+    animal:
+      type: object
+      properties:
+        kind:
+          type: string
+          enum: ['snake', 'rabbit']
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    EnumObject:
+      type: object
+      properties:
+        type:
+          enum: ['a', 'b']
+          type: string
+    EnumRoot:
+      enum: ['a', 'b']
+      type: string
+    IntEnum:
+      enum: [1,2]
+      type: number
+    AliasEnum:
+      enum: [1,2,3]
+      type: number
+      x-enum-varnames: ['a', 'b', 'c']
+    MultipleTypeEnum:
+      enum: [ "red", "amber", "green", null, 42 ]
+    singleEnum:
+      enum: [ "pet" ]
+      type: string
+    arrayEnum:
+      type: array
+      items: [
+        { enum: [ "cat" ] },
+        { enum: [ "dog"]}
+      ]
+    nestedNullableEnum:
+      type: object
+      properties:
+        nested_version:
+          type: string
+          nullable: true
+          default: RC1
+          description: nullable enum
+          example: RC2
+          enum:
+            - RC1
+            - RC1N
+            - RC2
+            - RC2N
+            - RC3
+            - RC4
+            - null
+    version:
+      type: string
+      nullable: true
+      default: RC1
+      description: nullable enum
+      example: RC2
+      enum:
+      - RC1
+      - RC1N
+      - RC2
+      - RC2N
+      - RC3
+      - RC4
+      - null
\ No newline at end of file
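
A couple of the enum shapes above have conventional Python targets; hand-written sketches follow, under two stated assumptions: `x-enum-varnames` supplies member names (an openapi-generator-style vendor extension), and a `null` entry in `enum` widens the field to Optional rather than becoming a member:

from enum import Enum

class Kind(str, Enum):
    dog = "dog"
    cat = "cat"

class AliasEnum(Enum):
    # names from x-enum-varnames, values from enum (assumption stated above)
    a = 1
    b = 2
    c = 3
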
diff -pruN 0.26.4-3/tests/data/openapi/example.yaml 0.45.0-1/tests/data/openapi/example.yaml
--- 0.26.4-3/tests/data/openapi/example.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/example.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,176 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+    post:
+      summary: Create a pet
+      operationId: createPets
+      tags:
+        - pets
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+  /pets/{petId}:
+    get:
+      summary: Info for a specific pet
+      operationId: showPetById
+      tags:
+        - pets
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Expected response to a valid request
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+components:
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    Pets:
+      type: array
+      items:
+        $ref: "#/components/schemas/Pet"
+    Users:
+      type: array
+      items:
+        required:
+          - id
+          - name
+        properties:
+          id:
+            type: integer
+            format: int64
+          name:
+            type: string
+          tag:
+            type: string
+    Id:
+      type: string
+    Rules:
+      type: array
+      items:
+        type: string
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    apis:
+      type: array
+      items:
+        type: object
+        properties:
+          apiKey:
+            type: string
+            description: To be used as a dataset parameter value
+          apiVersionNumber:
+            type: string
+            description: To be used as a version parameter value
+          apiUrl:
+            type: string
+            format: uri
+            description: "The URL describing the dataset's fields"
+          apiDocumentationUrl:
+            type: string
+            format: uri
+            description: A URL to the API console for each API
+    Event:
+      type: object
+      properties:
+        name:
+          type: string
+    Result:
+        type: object
+        properties:
+          event:
+            $ref: '#/components/schemas/Event'
diff -pruN 0.26.4-3/tests/data/openapi/exclusive.yaml 0.45.0-1/tests/data/openapi/exclusive.yaml
--- 0.26.4-3/tests/data/openapi/exclusive.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/exclusive.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,42 @@
+openapi: 3.0.3
+info:
+  version: 1.0.0
+  title: Bug
+servers: []
+paths: {}
+components:
+  schemas:
+    MaximumProblem:
+      type: object
+      properties:
+        status:
+          type: integer
+          minimum: 100
+          maximum: 600
+          exclusiveMaximum: true
+    MinimumProblem:
+      type: object
+      properties:
+        status:
+          type: integer
+          minimum: 100
+          maximum: 600
+          exclusiveMinimum: true
+    MinimumMaximumProblem:
+      type: object
+      properties:
+        status:
+          type: integer
+          minimum: 100
+          maximum: 600
+          exclusiveMinimum: true
+          exclusiveMaximum: true
+    Problem:
+      type: object
+      properties:
+        status:
+          type: integer
+          minimum: 100
+          maximum: 600
+          exclusiveMinimum: false
+          exclusiveMaximum: false
\ No newline at end of file
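
In OpenAPI 3.0.x (this fixture), `exclusiveMinimum`/`exclusiveMaximum` are booleans that qualify `minimum`/`maximum`, unlike JSON Schema 2020-12 (and OpenAPI 3.1) where they carry the bound themselves. The resulting pydantic constraints, as a hand-written sketch rather than the generator's verbatim output:

from typing import Optional
from pydantic import BaseModel, Field

class MaximumProblem(BaseModel):
    status: Optional[int] = Field(None, ge=100, lt=600)  # exclusiveMaximum: true -> lt

class MinimumProblem(BaseModel):
    status: Optional[int] = Field(None, gt=100, le=600)  # exclusiveMinimum: true -> gt

class MinimumMaximumProblem(BaseModel):
    status: Optional[int] = Field(None, gt=100, lt=600)  # both exclusive

class Problem(BaseModel):
    status: Optional[int] = Field(None, ge=100, le=600)  # explicit false -> inclusive
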
diff -pruN 0.26.4-3/tests/data/openapi/external_ref_with_transitive_local_ref/openapi.yaml 0.45.0-1/tests/data/openapi/external_ref_with_transitive_local_ref/openapi.yaml
--- 0.26.4-3/tests/data/openapi/external_ref_with_transitive_local_ref/openapi.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/external_ref_with_transitive_local_ref/openapi.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,38 @@
+openapi: 3.1.0
+info:
+  title: Feedback API
+  version: 1.0.0
+paths:
+  /feedback:
+    get:
+      summary: Get all feedback items
+      responses:
+        '200':
+          description: List of feedback items
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  $ref: './schemas/feedback.yaml#/FeedbackItem'
+    post:
+      summary: Create a feedback item
+      requestBody:
+        required: true
+        content:
+          application/json:
+            schema:
+              $ref: './schemas/feedback.yaml#/FeedbackItemCreate'
+      responses:
+        '201':
+          description: Created feedback item
+          content:
+            application/json:
+              schema:
+                $ref: './schemas/feedback.yaml#/FeedbackItem'
+components:
+  schemas:
+    FeedbackItem:
+      $ref: './schemas/feedback.yaml#/FeedbackItem'
+    FeedbackItemCreate:
+      $ref: './schemas/feedback.yaml#/FeedbackItemCreate'
diff -pruN 0.26.4-3/tests/data/openapi/external_ref_with_transitive_local_ref/schemas/feedback.yaml 0.45.0-1/tests/data/openapi/external_ref_with_transitive_local_ref/schemas/feedback.yaml
--- 0.26.4-3/tests/data/openapi/external_ref_with_transitive_local_ref/schemas/feedback.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/external_ref_with_transitive_local_ref/schemas/feedback.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,31 @@
+FeedbackItemBase:
+  type: object
+  properties:
+    id:
+      type: integer
+    message:
+      type: string
+  required:
+    - id
+    - message
+
+FeedbackItem:
+  allOf:
+    - $ref: '#/FeedbackItemBase'
+    - type: object
+      properties:
+        created_at:
+          type: string
+          format: date-time
+      required:
+        - created_at
+
+FeedbackItemCreate:
+  allOf:
+    - $ref: '#/FeedbackItemBase'
+    - type: object
+      properties:
+        user_id:
+          type: integer
+      required:
+        - user_id
diff -pruN 0.26.4-3/tests/data/openapi/external_relative_ref/model_a/types.openapi.yaml 0.45.0-1/tests/data/openapi/external_relative_ref/model_a/types.openapi.yaml
--- 0.26.4-3/tests/data/openapi/external_relative_ref/model_a/types.openapi.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/external_relative_ref/model_a/types.openapi.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,20 @@
+
+openapi: 3.0.3
+info:
+  title: Model A definitions
+  version: "1.0"
+paths: {}
+components:
+  schemas:
+    model_a.input:
+      type: object
+      properties:
+        name:
+          type: string
+    model_a.output:
+      type: object
+      properties:
+        output:
+          type: string
+        input:
+          $ref: "#/components/schemas/model_a.input"
diff -pruN 0.26.4-3/tests/data/openapi/external_relative_ref/model_b/module.openapi.yaml 0.45.0-1/tests/data/openapi/external_relative_ref/model_b/module.openapi.yaml
--- 0.26.4-3/tests/data/openapi/external_relative_ref/model_b/module.openapi.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/external_relative_ref/model_b/module.openapi.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+openapi: 3.0.3
+info:
+  title: Model B definitions
+  version: "1.0"
+paths: {}
+components:
+  schemas:
+    modules.quality_evaluation.QualityEvaluationRequest:
+      type: object
+      properties:
+        input:
+          $ref: "../model_a/types.openapi.yaml#/components/schemas/model_a.output"
diff -pruN 0.26.4-3/tests/data/openapi/extra_data.json 0.45.0-1/tests/data/openapi/extra_data.json
--- 0.26.4-3/tests/data/openapi/extra_data.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/extra_data.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,8 @@
+{
+  "Pet": {
+    "comment": "1 2, 1 2, this is just a pet",
+    "config":{
+      "arbitrary_types_allowed": "True",
+      "coerce_numbers_to_str": "True"}
+  }
+}
diff -pruN 0.26.4-3/tests/data/openapi/extra_data_msgspec.json 0.45.0-1/tests/data/openapi/extra_data_msgspec.json
--- 0.26.4-3/tests/data/openapi/extra_data_msgspec.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/extra_data_msgspec.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+{
+    "#all#": {
+        "base_class_kwargs": {
+            "omit_defaults": true
+        }
+    }
+}
diff -pruN 0.26.4-3/tests/data/openapi/flat_type.jsonschema 0.45.0-1/tests/data/openapi/flat_type.jsonschema
--- 0.26.4-3/tests/data/openapi/flat_type.jsonschema	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/flat_type.jsonschema	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+{
+  "title": "Foo",
+  "$schema": "http://json-schema.org/schema#",
+  "description": "",
+  "type": "object",
+  "properties": {
+    "foo": {
+      "$ref": "#/definitions/foo"
+    }
+  },
+  "definitions": {
+    "foo": {
+      "type": "string"
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/openapi/include_path_parameters.yaml 0.45.0-1/tests/data/openapi/include_path_parameters.yaml
--- 0.26.4-3/tests/data/openapi/include_path_parameters.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/include_path_parameters.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,43 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: API with Path Parameters
+paths:
+  /users/{userId}/posts/{postId}:
+    get:
+      summary: Get a specific post by user
+      operationId: getUserPost
+      parameters:
+        - name: userId
+          in: path
+          required: true
+          schema:
+            type: integer
+        - name: postId
+          in: path
+          required: true
+          schema:
+            type: string
+        - name: includeComments
+          in: query
+          required: false
+          schema:
+            type: boolean
+      responses:
+        '200':
+          description: A post
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Post"
+components:
+  schemas:
+    Post:
+      type: object
+      properties:
+        id:
+          type: string
+        title:
+          type: string
+        content:
+          type: string
diff -pruN 0.26.4-3/tests/data/openapi/inheritance.yaml 0.45.0-1/tests/data/openapi/inheritance.yaml
--- 0.26.4-3/tests/data/openapi/inheritance.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/inheritance.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,28 @@
+openapi: 3.1.0
+components:
+  schemas:
+    Base:
+      required:
+        - id
+      properties:
+        id:
+          type: string
+          format: uuid
+        createdAt:
+          type: string
+          format: date-time
+        version:
+          type: number
+          default: 1
+    Child:
+      allOf:
+        - $ref: "#/components/schemas/Base"
+        - properties:
+            url:
+              type: string
+              format: uri
+              default: "https://example.com"
+            title:
+              type: string
+          required:
+            - title
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/invalid.yaml 0.45.0-1/tests/data/openapi/invalid.yaml
--- 0.26.4-3/tests/data/openapi/invalid.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/invalid.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,2 @@
+invalid:
+  openapi
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/json_pointer.yaml 0.45.0-1/tests/data/openapi/json_pointer.yaml
--- 0.26.4-3/tests/data/openapi/json_pointer.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/json_pointer.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,5 @@
+components:
+  schemas:
+    Foo:
+      allOf:
+        - $ref: "root_model.yaml#/Test"
diff -pruN 0.26.4-3/tests/data/openapi/lazy_resolved_models.yaml 0.45.0-1/tests/data/openapi/lazy_resolved_models.yaml
--- 0.26.4-3/tests/data/openapi/lazy_resolved_models.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/lazy_resolved_models.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,95 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+components:
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    Pets:
+      type: array
+      items:
+        $ref: "#/components/schemas/Pet"
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    Results:
+        type: object
+        properties:
+          envets:
+            items:
+              $ref: '#/components/schemas/Events'
+          event:
+            items:
+              $ref: '#/components/schemas/Event'
+    Events:
+        type: array
+        items:
+          $ref: '#/components/schemas/Event'
+    Event:
+      type: object
+      properties:
+        name:
+          type: string
+        event:
+          $ref: '#/components/schemas/Event'
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/list.json 0.45.0-1/tests/data/openapi/list.json
--- 0.26.4-3/tests/data/openapi/list.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/list.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1 @@
+["Ceci", "n'est", "pas", "une", "object"]
diff -pruN 0.26.4-3/tests/data/openapi/max_items_enum.yaml 0.45.0-1/tests/data/openapi/max_items_enum.yaml
--- 0.26.4-3/tests/data/openapi/max_items_enum.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/max_items_enum.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,17 @@
+openapi: "3.1.0"
+components:
+  schemas:
+    Foo:
+      type: object
+      properties:
+        bar:
+          type: array
+          items:
+            enum:
+              - hello
+              - goodbye
+            maxLength: 5
+            minLength: 1
+            type: string
+            pattern: "^.*$"
+          maxItems: 3
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/max_min_number.yaml 0.45.0-1/tests/data/openapi/max_min_number.yaml
--- 0.26.4-3/tests/data/openapi/max_min_number.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/max_min_number.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,30 @@
+openapi: 3.0.3
+info:
+  title: Product API
+  version: 1.0
+paths:
+  /product:
+    post:
+      operationId: createProduct
+      description: Create new product
+      requestBody:
+        required: true
+        content:
+          application/json:
+            schema:
+              $ref: '#/components/schemas/product'
+      responses:
+        '200':
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/product'
+components:
+  schemas:
+    product:
+      type: object
+      properties:
+        price:
+          type: number
+          minimum: -999999.999999
+          maximum: 999999.999999
diff -pruN 0.26.4-3/tests/data/openapi/modular.yaml 0.45.0-1/tests/data/openapi/modular.yaml
--- 0.26.4-3/tests/data/openapi/modular.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/modular.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,287 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Modular Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/collections.Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+    post:
+      summary: Create a pet
+      operationId: createPets
+      tags:
+        - pets
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+  /pets/{petId}:
+    get:
+      summary: Info for a specific pet
+      operationId: showPetById
+      tags:
+        - pets
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Expected response to a valid request
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/collections.Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+components:
+  schemas:
+    models.Species:
+      type: string
+      enum:
+        - dog
+        - cat
+        - snake
+    models.Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+        species:
+          $ref: '#/components/schemas/models.Species'
+    models.User:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    collections.Pets:
+      type: array
+      items:
+        $ref: "#/components/schemas/models.Pet"
+    collections.Users:
+      type: array
+      items:
+        $ref: "#/components/schemas/models.User"
+    optional:
+      type: string
+    Id:
+      type: string
+    collections.Rules:
+      type: array
+      items:
+        type: string
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    collections.apis:
+      type: array
+      items:
+        type: object
+        properties:
+          apiKey:
+            type: string
+            description: To be used as a dataset parameter value
+          apiVersionNumber:
+            type: string
+            description: To be used as a version parameter value
+          apiUrl:
+            type: string
+            format: uri
+            description: "The URL describing the dataset's fields"
+          apiDocumentationUrl:
+            type: string
+            format: uri
+            description: A URL to the API console for each API
+          stage:
+            type: string
+            enum: [
+              "test",
+              "dev",
+              "stg",
+              "prod"
+            ]
+    models.Event:
+      type: object
+      properties:
+        name:
+          anyOf:
+            - type: string
+            - type: number
+            - type: integer
+            - type: boolean
+            - type: object
+            - type: array
+              items:
+                type: string
+    Result:
+      type: object
+      properties:
+        event:
+          $ref: '#/components/schemas/models.Event'
+    foo.bar.Thing:
+      properties:
+        attributes:
+          type: object
+    foo.bar.Thang:
+      properties:
+        attributes:
+          type: array
+          items:
+            type: object
+    foo.bar.Clone:
+      allOf:
+        - $ref: '#/components/schemas/foo.bar.Thing'
+        - type: object
+          properties:
+            others:
+              type: object
+              properties:
+                 name:
+                   type: string
+
+    foo.Tea:
+      properties:
+        flavour:
+          type: string
+        id:
+          $ref: '#/components/schemas/Id'
+    Source:
+      properties:
+        country:
+          type: string
+    foo.Cocoa:
+      properties:
+        quality:
+          type: integer
+    bar.Field:
+      type: string
+      example: green
+    woo.boo.Chocolate:
+      properties:
+        flavour:
+          type: string
+        source:
+          $ref: '#/components/schemas/Source'
+        cocoa:
+          $ref: '#/components/schemas/foo.Cocoa'
+        field:
+          $ref: '#/components/schemas/bar.Field'
+    differentTea:
+      type: object
+      properties:
+        foo:
+          $ref: '#/components/schemas/foo.Tea'
+        nested:
+          $ref: '#/components/schemas/nested.foo.Tea'
+    nested.foo.Tea:
+      properties:
+        flavour:
+          type: string
+        id:
+          $ref: '#/components/schemas/Id'
+        self:
+          $ref: '#/components/schemas/nested.foo.Tea'
+        optional:
+          type: array
+          items:
+            $ref: '#/components/schemas/optional'
+    nested.foo.TeaClone:
+      properties:
+        flavour:
+          type: string
+        id:
+          $ref: '#/components/schemas/Id'
+        self:
+          $ref: '#/components/schemas/nested.foo.Tea'
+        optional:
+          type: array
+          items:
+            $ref: '#/components/schemas/optional'
+    nested.foo.List:
+      type: array
+      items:
+        $ref: '#/components/schemas/nested.foo.Tea'
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/module_class_name_collision/openapi.json 0.45.0-1/tests/data/openapi/module_class_name_collision/openapi.json
--- 0.26.4-3/tests/data/openapi/module_class_name_collision/openapi.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/module_class_name_collision/openapi.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,41 @@
+{
+  "openapi": "3.0.1",
+  "info": {
+    "title": "Test API",
+    "version": "1.0.0"
+  },
+  "paths": {
+    "/a": {
+      "get": {
+        "operationId": "getA",
+        "responses": {
+          "200": {
+            "description": "Success",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "type": "array",
+                  "items": {
+                    "$ref": "#/components/schemas/A.A"
+                  }
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+  },
+  "components": {
+    "schemas": {
+      "A.A": {
+        "type": "object",
+        "properties": {
+          "name": {
+            "type": "string"
+          }
+        }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/openapi/module_class_name_collision_deep/openapi.json 0.45.0-1/tests/data/openapi/module_class_name_collision_deep/openapi.json
--- 0.26.4-3/tests/data/openapi/module_class_name_collision_deep/openapi.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/module_class_name_collision_deep/openapi.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,41 @@
+{
+  "openapi": "3.0.1",
+  "info": {
+    "title": "Test API",
+    "version": "1.0.0"
+  },
+  "paths": {
+    "/b": {
+      "get": {
+        "operationId": "getB",
+        "responses": {
+          "200": {
+            "description": "Success",
+            "content": {
+              "application/json": {
+                "schema": {
+                  "type": "array",
+                  "items": {
+                    "$ref": "#/components/schemas/A.B.B"
+                  }
+                }
+              }
+            }
+          }
+        }
+      }
+    }
+  },
+  "components": {
+    "schemas": {
+      "A.B.B": {
+        "type": "object",
+        "properties": {
+          "value": {
+            "type": "integer"
+          }
+        }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/openapi/msgspec_oneof_with_null.yaml 0.45.0-1/tests/data/openapi/msgspec_oneof_with_null.yaml
--- 0.26.4-3/tests/data/openapi/msgspec_oneof_with_null.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/msgspec_oneof_with_null.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,39 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Test oneOf with null
+components:
+  schemas:
+    Model:
+      type: object
+      properties:
+        required_field:
+          type: string
+        optional_oneof_with_null:
+          oneOf:
+            - type: string
+            - type: "null"
+        optional_anyof_with_null:
+          anyOf:
+            - type: string
+            - type: "null"
+        optional_field_not_nullable:
+          type: string
+        optional_oneof_with_null_and_constraint:
+          oneOf:
+            - type: string
+              maxLength: 100
+            - type: "null"
+        optional_nullable_field:
+          type: string
+          nullable: true
+        optional_nullable_with_constraint:
+          type: string
+          nullable: true
+          maxLength: 50
+        optional_nullable_with_min_length:
+          type: string
+          nullable: true
+          minLength: 5
+      required:
+        - required_field
diff -pruN 0.26.4-3/tests/data/openapi/multiple_required_any_of.yaml 0.45.0-1/tests/data/openapi/multiple_required_any_of.yaml
--- 0.26.4-3/tests/data/openapi/multiple_required_any_of.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/multiple_required_any_of.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+openapi: 3.0.0
+info:
+paths:
+components:
+  schemas:
+    Addr:
+      type: object
+      properties:
+        ipv4Addr:
+          $ref: '#/components/schemas/Ipv4Addr'
+        ipv6Addr:
+          $ref: '#/components/schemas/Ipv6Addr'
+      anyOf:
+        - required: [ ipv4Addr ]
+        - required: [ ipv6Addr ]
+    Ipv4Addr:
+      type: string
+      format: ipv4
+    Ipv6Addr:
+      type: string
+      format: ipv6
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/namespace_subns_ref.json 0.45.0-1/tests/data/openapi/namespace_subns_ref.json
--- 0.26.4-3/tests/data/openapi/namespace_subns_ref.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/namespace_subns_ref.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,28 @@
+{
+  "openapi": "3.0.1",
+  "info": {
+    "title": "Test API",
+    "version": "1.0.0"
+  },
+  "paths": {},
+  "components": {
+    "schemas": {
+      "ns.wrapper": {
+        "type": "object",
+        "properties": {
+          "item": {
+            "$ref": "#/components/schemas/ns.subns.item"
+          }
+        }
+      },
+      "ns.subns.item": {
+        "type": "object",
+        "properties": {
+          "name": {
+            "type": "string"
+          }
+        }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/openapi/nested_anyof.yaml 0.45.0-1/tests/data/openapi/nested_anyof.yaml
--- 0.26.4-3/tests/data/openapi/nested_anyof.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/nested_anyof.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,27 @@
+openapi: 3.0.0
+info:
+  title: datamodel-code-generator bug example
+components:
+  schemas:
+    Container:
+      allOf:
+        - type: object
+          required:
+            - contents
+          properties:
+            contents:
+              type: array
+              items:
+                anyOf:
+                  - $ref: '#/components/schemas/Type1'
+                  - $ref: '#/components/schemas/Type2'
+    Type1:
+      type: object
+      properties:
+        prop:
+          type: string
+    Type2:
+      type: object
+      properties:
+        prop:
+          type: string
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/nested_enum.json 0.45.0-1/tests/data/openapi/nested_enum.json
--- 0.26.4-3/tests/data/openapi/nested_enum.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/nested_enum.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,63 @@
+{
+    "openapi": "3.0.0",
+    "info": {
+        "title": "Test API",
+        "version": "1.0"
+    },
+    "paths": {},
+    "components": {
+        "schemas": {
+          "Result1": {
+                "type": "object",
+                "description": "description for Result1",
+                "properties": {
+                    "state": {
+                        "$ref": "#/components/schemas/NestedState1"
+                    }
+                },
+                "required": [
+                    "state"
+                ]
+            },
+            "Result2": {
+                "type": "object",
+                "description": "description for Result2",
+                "properties": {
+                    "state": {
+                        "$ref": "#/components/schemas/NestedState2"
+                    }
+                },
+                "required": [
+                    "state"
+                ]
+            },
+            "NestedState1": {
+                "allOf": [
+                    {
+                        "description": "description for NestedState1"
+                    },
+                    {
+                        "$ref": "#/components/schemas/State"
+                    }
+                ]
+            },
+            "NestedState2": {
+                "allOf": [
+                    {
+                        "description": "description for NestedState2"
+                    },
+                    {
+                        "$ref": "#/components/schemas/State"
+                    }
+                ]
+            },
+            "State": {
+                "type": "string",
+                "enum": [
+                    "1",
+                    "2"
+                ]
+            }
+        }
+    }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/nested_oneof.yaml 0.45.0-1/tests/data/openapi/nested_oneof.yaml
--- 0.26.4-3/tests/data/openapi/nested_oneof.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/nested_oneof.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,27 @@
+openapi: 3.0.0
+info:
+  title: datamodel-code-generator bug example
+components:
+  schemas:
+    Container:
+      allOf:
+        - type: object
+          required:
+            - contents
+          properties:
+            contents:
+              type: array
+              items:
+                oneOf:
+                  - $ref: '#/components/schemas/Type1'
+                  - $ref: '#/components/schemas/Type2'
+    Type1:
+      type: object
+      properties:
+        prop:
+          type: string
+    Type2:
+      type: object
+      properties:
+        prop:
+          type: string
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/nested_package_enum_default.json 0.45.0-1/tests/data/openapi/nested_package_enum_default.json
--- 0.26.4-3/tests/data/openapi/nested_package_enum_default.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/nested_package_enum_default.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,42 @@
+{
+  "openapi": "3.0.0",
+  "info": {
+    "title": "Test API",
+    "version": "1.0.0"
+  },
+  "paths": {},
+  "components": {
+    "schemas": {
+      "io.example.api.v1.Resolution": {
+        "type": "string",
+        "enum": ["Required", "Optional"],
+        "default": "Required"
+      },
+      "io.example.api.v1.Policy": {
+        "type": "string",
+        "enum": ["Allow", "Deny"],
+        "default": "Allow"
+      },
+      "io.example.api.v1.BucketSpec": {
+        "type": "object",
+        "properties": {
+          "resolution": {
+            "allOf": [
+              {"$ref": "#/components/schemas/io.example.api.v1.Resolution"}
+            ],
+            "default": "Required"
+          },
+          "policy": {
+            "allOf": [
+              {"$ref": "#/components/schemas/io.example.api.v1.Policy"}
+            ],
+            "default": "Allow"
+          },
+          "name": {
+            "type": "string"
+          }
+        }
+      }
+    }
+  }
+}
diff -pruN 0.26.4-3/tests/data/openapi/no_components.yaml 0.45.0-1/tests/data/openapi/no_components.yaml
--- 0.26.4-3/tests/data/openapi/no_components.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/no_components.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,99 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+    post:
+      summary: Create a pet
+      operationId: createPets
+      tags:
+        - pets
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+  /pets/{petId}:
+    get:
+      summary: Info for a specific pet
+      operationId: showPetById
+      tags:
+        - pets
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Expected response to a valid request
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
diff -pruN 0.26.4-3/tests/data/openapi/non_operations_and_security.yaml 0.45.0-1/tests/data/openapi/non_operations_and_security.yaml
--- 0.26.4-3/tests/data/openapi/non_operations_and_security.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/non_operations_and_security.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,72 @@
+openapi: 3.1.0
+info:
+  title: API with Non-Operations and Security
+  version: 1.0.0
+security:
+  - api_key: []
+paths:
+  /pets:
+    summary: "This is not an operation"
+    description: "Also not an operation"
+    parameters:
+      - name: test_param
+        in: query
+        schema:
+          type: string
+    get:
+      summary: Get pets
+      responses:
+        '200':
+          description: Success
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  $ref: '#/components/schemas/Pet'
+    post:
+      summary: Create pet
+      security:
+        - oauth2: ['write']
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: '#/components/schemas/Pet'
+      responses:
+        '201':
+          description: Created
+webhooks:
+  pet.created:
+    summary: "Not an operation for webhooks"
+    description: "Also not an operation for webhooks"
+    post:
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: '#/components/schemas/Pet'
+      responses:
+        '200':
+          description: Success
+components:
+  schemas:
+    Pet:
+      type: object
+      properties:
+        id:
+          type: integer
+        name:
+          type: string
+  securitySchemes:
+    api_key:
+      type: apiKey
+      in: header
+      name: X-API-Key
+    oauth2:
+      type: oauth2
+      flows:
+        clientCredentials:
+          tokenUrl: /token
+          scopes:
+            write: Write access
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/not.json 0.45.0-1/tests/data/openapi/not.json
--- 0.26.4-3/tests/data/openapi/not.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/not.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1 @@
+This is not JSON!
diff -pruN 0.26.4-3/tests/data/openapi/not_real_string.json 0.45.0-1/tests/data/openapi/not_real_string.json
--- 0.26.4-3/tests/data/openapi/not_real_string.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/not_real_string.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,61 @@
+{
+  "openapi" : "3.0.0",
+  "components" : {
+    "schemas" : {
+      "UserId" : {
+        "type" : "string"
+      },
+      "Tweet" : {
+        "type" : "object",
+        "properties" : {
+          "author_id" : {
+            "$ref" : "#/components/schemas/UserId"
+          }
+        }
+      },
+      "Users": {
+        "type": "array",
+        "items": [{
+          "$ref":  "#/components/schemas/UserId"
+        }]
+      },
+      "FileHash": {
+        "type": "string",
+        "minLength": 32,
+        "maxLength": 32,
+        "pattern": "^[a-fA-F\\d]{32}$",
+        "description": "For file"
+      },
+      "ImageHash": {
+        "$ref": "#/components/schemas/FileHash",
+        "maxLength": 64,
+        "minLength": 64
+      },
+       "FileRequest": {
+        "type": "object",
+        "required": ["file_hash"],
+        "properties": {
+          "file_hash": {
+            "$ref": "#/components/schemas/FileHash"
+          }
+        }
+      },
+      "ImageRequest": {
+        "type": "object",
+        "required": ["file_hash"],
+        "properties": {
+          "image_hash": {
+            "$ref": "#/components/schemas/ImageHash",
+            "description": "For image"
+          }
+        }
+      },
+      "FileHashes": {
+        "type": "array",
+        "items": {
+           "$ref": "#/components/schemas/FileHash"
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/null_only_enum.yaml 0.45.0-1/tests/data/openapi/null_only_enum.yaml
--- 0.26.4-3/tests/data/openapi/null_only_enum.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/null_only_enum.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+openapi: 3.0.1
+info:
+  title: Null Only Enum Test
+  version: 1.0.0
+paths: {}
+components:
+  schemas:
+    NullEnum:
+      type: string
+      enum:
+        - null
+      nullable: true
diff -pruN 0.26.4-3/tests/data/openapi/nullable.yaml 0.45.0-1/tests/data/openapi/nullable.yaml
--- 0.26.4-3/tests/data/openapi/nullable.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/nullable.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,122 @@
+openapi: 3.0.3
+info:
+  version: 1.0.0
+  title: testapi
+  license:
+    name: proprietary
+servers: []
+paths: {}
+components:
+  schemas:
+    TopLevel:
+      type: object
+      properties:
+        cursors:
+          type: object
+          properties:
+            prev:
+              type: string
+              nullable: true
+            next:
+              type: string
+              default: last
+            index:
+              type: number
+            tag:
+              type: string
+          required:
+          - prev
+          - index
+      required:
+      - cursors
+    User:
+      type: object
+      properties:
+        info:
+          type: object
+          properties:
+            name:
+              type: string
+          required:
+            - name
+      required:
+        - info
+    apis:
+      type: array
+      nullable: true
+      items:
+        type: object
+        properties:
+          apiKey:
+            type: string
+            description: To be used as a dataset parameter value
+          apiVersionNumber:
+            type: string
+            description: To be used as a version parameter value
+          apiUrl:
+            type: string
+            format: uri
+            description: "The URL describing the dataset's fields"
+            nullable: true
+          apiDocumentationUrl:
+            type: string
+            format: uri
+            description: A URL to the API console for each API
+            nullable: true
+    email:
+      type: array
+      items:
+        type: object
+        properties:
+          author:
+            type: string
+          address:
+            type: string
+            description: email address
+          description:
+            type: string
+            default: empty
+          tag:
+            type: string
+        required:
+          - author
+          - address
+    id:
+      type: integer
+      default: 1
+    description:
+      type: string
+      nullable: true
+      default: example
+    name:
+      type: string
+      nullable: true
+    tag:
+      type: string
+    notes:
+      type: object
+      properties:
+        comments:
+          type: array
+          items:
+              type: string
+          default_factory: list
+          nullable: false
+    options:
+      type: object
+      properties:
+        comments:
+          type: array
+          items:
+              type: string
+              nullable: true
+        oneOfComments:
+           type: array
+           items:
+               oneOf:
+                - type: string
+                - type: number
+               nullable: true
+      required:
+        - comments
+        - oneOfComments
diff -pruN 0.26.4-3/tests/data/openapi/nullable_31.yaml 0.45.0-1/tests/data/openapi/nullable_31.yaml
--- 0.26.4-3/tests/data/openapi/nullable_31.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/nullable_31.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,23 @@
+openapi: 3.1.0
+info:
+  version: 1.0.0
+  title: testapi
+  license:
+    name: proprietary
+servers: []
+paths: {}
+components:
+  schemas:
+    Basket:
+      type: object
+      properties:
+        apples:
+          type:
+            - array
+            - 'null'
+          items:
+            $ref: '#/components/schemas/Apple'
+      required:
+        - apples
+    Apple:
+      type: object
diff -pruN 0.26.4-3/tests/data/openapi/oas_response_reference.yaml 0.45.0-1/tests/data/openapi/oas_response_reference.yaml
--- 0.26.4-3/tests/data/openapi/oas_response_reference.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/oas_response_reference.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,75 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+  description: |
+    This description is for testing
+    multi-line
+    description
+
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            default: 0
+            type: integer
+            format: int32
+      responses:
+        '200':
+          $ref: '#/components/responses/Pet'
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+components:
+  responses:
+    Pet:
+      description: A paged array of pets
+      headers:
+        x-next:
+          description: A link to the next page of responses
+          schema:
+            type: string
+      content:
+        application/json:
+          schema:
+            items:
+              $ref: "#/components/schemas/Pet"
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/oneof.yaml 0.45.0-1/tests/data/openapi/oneof.yaml
--- 0.26.4-3/tests/data/openapi/oneof.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/oneof.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,183 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pet"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+    post:
+      summary: Create a pet
+      operationId: createPets
+      tags:
+        - pets
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+  /pets/{petId}:
+    get:
+      summary: Info for a specific pet
+      operationId: showPetById
+      tags:
+        - pets
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The id of the pet to retrieve
+          schema:
+            type: string
+      responses:
+        '200':
+          description: Expected response to a valid request
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pet"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    x-amazon-apigateway-integration:
+      uri:
+        Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+      passthroughBehavior: when_no_templates
+      httpMethod: POST
+      type: aws_proxy
+components:
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    Car:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    OneOfItem:
+      oneOf:
+        - $ref: "#/components/schemas/Pet"
+        - $ref: "#/components/schemas/Car"
+        - type: object
+          properties:
+              name:
+                type: string
+        - type: string
+          maxLength: 5000
+    OneOfobj:
+      type: object
+      properties:
+        item:
+          oneOf:
+            - $ref: "#/components/schemas/Pet"
+            - $ref: "#/components/schemas/Car"
+            - type: object
+              properties:
+                name:
+                  type: string
+            - type: string
+              maxLength: 5000
+    OneOfArray:
+      type: array
+      items:
+          oneOf:
+            - $ref: "#/components/schemas/Pet"
+            - $ref: "#/components/schemas/Car"
+            - type: object
+              properties:
+                name:
+                  type: string
+                birthday:
+                  type: string
+                  format: date
+            - type: string
+              maxLength: 5000
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    Config:
+      properties:
+        setting:
+          type: object
+          additionalProperties:
+            oneOf:
+            - type: string
+            - type: array
+              items:
+                type: string
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/override_required_all_of.yaml 0.45.0-1/tests/data/openapi/override_required_all_of.yaml
--- 0.26.4-3/tests/data/openapi/override_required_all_of.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/override_required_all_of.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,41 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    ObjectBase:
+      description: Object schema
+      type: object
+      properties:
+        name:
+          description: Name of the object
+          type: string
+        type:
+          description: Object type
+          type: string
+          enum:
+            - my_first_object
+            - my_second_object
+        rank:
+          description: User rank
+          type:
+            - integer
+            - number
+        allIn:
+          oneOf:
+            - $ref: '#/components/schemas/ObjectBase/properties/name'
+            - $ref: '#/components/schemas/ObjectBase/properties/type'
+            - $ref: '#/components/schemas/ObjectBase/properties/rank'
+    CreateObjectRequest:
+      description: Request schema for object creation
+      type: object
+      allOf:
+        - $ref: '#/components/schemas/ObjectBase'
+      required:
+        - name
+        - type
+        - rank
+        - allIn
+    UpdateObjectRequest:
+      description: Request schema for object updates
+      type: object
+      allOf:
+        - $ref: '#/components/schemas/ObjectBase'
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/paths_external_ref/openapi.yaml 0.45.0-1/tests/data/openapi/paths_external_ref/openapi.yaml
--- 0.26.4-3/tests/data/openapi/paths_external_ref/openapi.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/paths_external_ref/openapi.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+openapi: 3.0.3
+info:
+  title: Paths External Ref Test
+  version: "1.0"
+paths:
+  /cats:
+    get:
+      operationId: getCats
+      responses:
+        "200":
+          description: List of cats
+          content:
+            application/json:
+              schema:
+                $ref: "./schemas/cat.yaml#/Cat"
+    post:
+      operationId: createCat
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: "./schemas/cat.yaml#/Cat"
+      responses:
+        "201":
+          description: Created
diff -pruN 0.26.4-3/tests/data/openapi/paths_external_ref/schemas/cat.yaml 0.45.0-1/tests/data/openapi/paths_external_ref/schemas/cat.yaml
--- 0.26.4-3/tests/data/openapi/paths_external_ref/schemas/cat.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/paths_external_ref/schemas/cat.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+Cat:
+  type: object
+  properties:
+    name:
+      type: string
+    breed:
+      type: string
diff -pruN 0.26.4-3/tests/data/openapi/paths_ref_with_external_schema/apis/pets/index.yaml 0.45.0-1/tests/data/openapi/paths_ref_with_external_schema/apis/pets/index.yaml
--- 0.26.4-3/tests/data/openapi/paths_ref_with_external_schema/apis/pets/index.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/paths_ref_with_external_schema/apis/pets/index.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,35 @@
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      responses:
+        '200':
+          description: A list of pets
+          content:
+            application/json:
+              schema:
+                $ref: "./model.yaml#/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "../../openapi.yaml#/components/schemas/Error"
+    post:
+      summary: Create a pet
+      operationId: createPet
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: "./model.yaml#/Pet"
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "../../openapi.yaml#/components/schemas/Error"
diff -pruN 0.26.4-3/tests/data/openapi/paths_ref_with_external_schema/apis/pets/model.yaml 0.45.0-1/tests/data/openapi/paths_ref_with_external_schema/apis/pets/model.yaml
--- 0.26.4-3/tests/data/openapi/paths_ref_with_external_schema/apis/pets/model.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/paths_ref_with_external_schema/apis/pets/model.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,18 @@
+Pet:
+  type: object
+  required:
+    - id
+    - name
+  properties:
+    id:
+      type: integer
+      format: int64
+    name:
+      type: string
+    tag:
+      type: string
+
+Pets:
+  type: array
+  items:
+    $ref: "#/Pet"
diff -pruN 0.26.4-3/tests/data/openapi/paths_ref_with_external_schema/openapi.yaml 0.45.0-1/tests/data/openapi/paths_ref_with_external_schema/openapi.yaml
--- 0.26.4-3/tests/data/openapi/paths_ref_with_external_schema/openapi.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/paths_ref_with_external_schema/openapi.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Test API
+paths:
+  /pets:
+    $ref: "./apis/pets/index.yaml#/paths/~1pets"
+components:
+  schemas:
+    Error:
+      type: object
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+        message:
+          type: string
diff -pruN 0.26.4-3/tests/data/openapi/pattern.yaml 0.45.0-1/tests/data/openapi/pattern.yaml
--- 0.26.4-3/tests/data/openapi/pattern.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/pattern.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+components:
+  schemas:
+    info:
+      type: object
+      properties:
+        hostName:
+          type: string
+          format: hostname
+        arn:
+          type: string
+          pattern: '(^arn:([^:]*):([^:]*):([^:]*):(|\*|[\d]{12}):(.+)$)|^\*$'
+        tel:
+          type: string
+          pattern: '^(\([0-9]{3}\))?[0-9]{3}-[0-9]{4}$'
+        comment:
+          type: string
+          pattern: '[^\b\f\n\r\t\\a+.?''"|()]+$'
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/pattern_lookaround.yaml 0.45.0-1/tests/data/openapi/pattern_lookaround.yaml
--- 0.26.4-3/tests/data/openapi/pattern_lookaround.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/pattern_lookaround.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+components:
+  schemas:
+    info:
+      type: object
+      properties:
+        name:
+          type: string
+          pattern: '.*foo.*(?<!baz)bar.*'
diff -pruN 0.26.4-3/tests/data/openapi/query_parameters.yaml 0.45.0-1/tests/data/openapi/query_parameters.yaml
--- 0.26.4-3/tests/data/openapi/query_parameters.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/query_parameters.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,197 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+  description: |
+    This description is for testing
+    multi-line
+    description
+
+servers:
+  - url: http://petstore.swagger.io/v1
+security:
+  - BearerAuth: []
+paths:
+  /pets/{petId}:
+    get:
+      summary: Get a pet by ID
+      operationId: getPet
+      parameters:
+        - name: petId
+          in: path
+          required: true
+          description: The pet ID
+          schema:
+            type: string
+        - name: include
+          in: query
+          required: false
+          description: Include additional data
+          schema:
+            type: string
+      responses:
+        '200':
+          description: A pet
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pet"
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            default: 0
+            type: integer
+        - name: HomeAddress
+          in: query
+          required: false
+          schema:
+            default: 'Unknown'
+            type: string
+        - name: kind
+          in: query
+          required: false
+          schema:
+            default: dog
+            type: string
+        - in: query
+          name: filter
+
+          # Wrap 'schema' into 'content.<media-type>'
+          content:
+            application/json: # <---- media type indicates how to serialize / deserialize the parameter content
+              schema:
+                type: object
+                properties:
+                  type:
+                    type: string
+                  color:
+                    type: string
+        - in: query
+          name: multipleMediaFilter
+
+          # Wrap 'schema' into 'content.<media-type>'
+          content:
+            application/xml: # <---- media type indicates how to serialize / deserialize the parameter content
+              schema:
+                type: object
+                properties:
+                  type:
+                    type: string
+                  media_type:
+                    type: string
+                    enum:
+                      - xml
+                      - json
+                    default: xml
+            application/json: # <---- media type indicates how to serialize / deserialize the parameter content
+              schema:
+                type: object
+                properties:
+                  type:
+                    type: string
+                  media_type:
+                    type: string
+                    enum:
+                      - xml
+                      - json
+                    default: json
+        - in: query
+          name: empty
+          content:
+            application/json:
+              {}
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                type: array
+                items:
+                  $ref: "#/components/schemas/Pet"
+        '500':
+          description: An internal error occurred
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+    post:
+      summary: Create a pet
+      requestBody:
+        required: true
+        content:
+          application/json:
+            schema:
+              type: object
+              properties:
+                name:
+                  type: string
+                age:
+                  type: integer
+      responses:
+        '201':
+          description: Null response
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+components:
+  parameters:
+    MyParam:
+      name: foo
+      in: query
+      schema:
+        type: string
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    PetForm:
+      title: PetForm
+      type: object
+      properties:
+        name:
+          type: string
+        age:
+          type: integer
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/read_only_write_only.yaml 0.45.0-1/tests/data/openapi/read_only_write_only.yaml
--- 0.26.4-3/tests/data/openapi/read_only_write_only.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/read_only_write_only.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,29 @@
+openapi: "3.0.0"
+info:
+  title: Read Only Write Only Test API
+  version: "1.0"
+paths: {}
+components:
+  schemas:
+    User:
+      type: object
+      required:
+        - id
+        - name
+        - password
+      properties:
+        id:
+          type: integer
+          readOnly: true
+        name:
+          type: string
+        password:
+          type: string
+          writeOnly: true
+        created_at:
+          type: string
+          format: date-time
+          readOnly: true
+        secret_token:
+          type: string
+          writeOnly: true
diff -pruN 0.26.4-3/tests/data/openapi/read_only_write_only_allof.yaml 0.45.0-1/tests/data/openapi/read_only_write_only_allof.yaml
--- 0.26.4-3/tests/data/openapi/read_only_write_only_allof.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/read_only_write_only_allof.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,45 @@
+openapi: "3.0.0"
+info:
+  title: Read Only Write Only AllOf Test API
+  version: "1.0"
+paths: {}
+components:
+  schemas:
+    Timestamps:
+      type: object
+      properties:
+        created_at:
+          type: string
+          format: date-time
+          readOnly: true
+        updated_at:
+          type: string
+          format: date-time
+          readOnly: true
+
+    Credentials:
+      type: object
+      properties:
+        password:
+          type: string
+          writeOnly: true
+        api_key:
+          type: string
+          writeOnly: true
+
+    User:
+      allOf:
+        - $ref: "#/components/schemas/Timestamps"
+        - $ref: "#/components/schemas/Credentials"
+        - type: object
+          required:
+            - id
+            - name
+          properties:
+            id:
+              type: integer
+              readOnly: true
+            name:
+              type: string
+            email:
+              type: string
diff -pruN 0.26.4-3/tests/data/openapi/read_only_write_only_allof_order.yaml 0.45.0-1/tests/data/openapi/read_only_write_only_allof_order.yaml
--- 0.26.4-3/tests/data/openapi/read_only_write_only_allof_order.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/read_only_write_only_allof_order.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,30 @@
+openapi: "3.0.0"
+info:
+  title: Read Only Write Only AllOf Order Test API
+  version: "1.0"
+paths: {}
+components:
+  schemas:
+    # Child is listed first but references Parent via allOf
+    Child:
+      type: object
+      allOf:
+        - $ref: "#/components/schemas/Parent"
+      properties:
+        child_field:
+          type: string
+    # Parent has readOnly/writeOnly fields
+    Parent:
+      type: object
+      required:
+        - id
+      properties:
+        id:
+          type: integer
+        created_at:
+          type: string
+          format: date-time
+          readOnly: true
+        secret:
+          type: string
+          writeOnly: true
diff -pruN 0.26.4-3/tests/data/openapi/read_only_write_only_allof_required_only.yaml 0.45.0-1/tests/data/openapi/read_only_write_only_allof_required_only.yaml
--- 0.26.4-3/tests/data/openapi/read_only_write_only_allof_required_only.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/read_only_write_only_allof_required_only.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,29 @@
+openapi: "3.0.0"
+info:
+  title: Read Only Write Only AllOf Required Only Test API
+  version: "1.0"
+paths: {}
+components:
+  schemas:
+    # Child references Parent which has an allOf with only 'required' (no ref, no properties)
+    Child:
+      type: object
+      allOf:
+        - $ref: "#/components/schemas/Parent"
+      properties:
+        child_field:
+          type: string
+    # Parent has allOf with just 'required' - no ref, no properties
+    Parent:
+      type: object
+      allOf:
+        - required:
+            - id
+        - type: object
+          properties:
+            id:
+              type: integer
+            created_at:
+              type: string
+              format: date-time
+              readOnly: true
diff -pruN 0.26.4-3/tests/data/openapi/read_only_write_only_allof_url_ref.yaml 0.45.0-1/tests/data/openapi/read_only_write_only_allof_url_ref.yaml
--- 0.26.4-3/tests/data/openapi/read_only_write_only_allof_url_ref.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/read_only_write_only_allof_url_ref.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+openapi: "3.0.0"
+info:
+  title: Read Only Write Only AllOf URL Ref Test API
+  version: "1.0"
+paths: {}
+components:
+  schemas:
+    Child:
+      type: object
+      allOf:
+        - $ref: "https://example.com/common.yaml#/components/schemas/BaseObject"
+      properties:
+        child_field:
+          type: string
diff -pruN 0.26.4-3/tests/data/openapi/read_only_write_only_allof_url_ref_remote.yaml 0.45.0-1/tests/data/openapi/read_only_write_only_allof_url_ref_remote.yaml
--- 0.26.4-3/tests/data/openapi/read_only_write_only_allof_url_ref_remote.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/read_only_write_only_allof_url_ref_remote.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+openapi: "3.0.0"
+info:
+  title: Common Schemas
+  version: "1.0"
+paths: {}
+components:
+  schemas:
+    BaseObject:
+      type: object
+      required:
+        - id
+      properties:
+        id:
+          type: integer
+        created_at:
+          type: string
+          format: date-time
+          readOnly: true
+        password:
+          type: string
+          writeOnly: true
diff -pruN 0.26.4-3/tests/data/openapi/read_only_write_only_anyof.yaml 0.45.0-1/tests/data/openapi/read_only_write_only_anyof.yaml
--- 0.26.4-3/tests/data/openapi/read_only_write_only_anyof.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/read_only_write_only_anyof.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,26 @@
+openapi: "3.0.0"
+info:
+  title: Read Only Write Only AnyOf Test API
+  version: "1.0"
+paths: {}
+components:
+  schemas:
+    Pet:
+      type: object
+      required:
+        - id
+      properties:
+        id:
+          type: integer
+        # Field with anyOf containing readOnly
+        status:
+          anyOf:
+            - type: string
+              readOnly: true
+            - type: integer
+        # Field with oneOf containing writeOnly
+        token:
+          oneOf:
+            - type: string
+              writeOnly: true
+            - type: integer
diff -pruN 0.26.4-3/tests/data/openapi/read_only_write_only_collision.yaml 0.45.0-1/tests/data/openapi/read_only_write_only_collision.yaml
--- 0.26.4-3/tests/data/openapi/read_only_write_only_collision.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/read_only_write_only_collision.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,26 @@
+openapi: "3.0.0"
+info:
+  title: Read Only Write Only Collision Test API
+  version: "1.0"
+paths: {}
+components:
+  schemas:
+    UserRequest:
+      type: object
+      properties:
+        existing_field:
+          type: string
+    User:
+      type: object
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          readOnly: true
+        name:
+          type: string
+        password:
+          type: string
+          writeOnly: true
diff -pruN 0.26.4-3/tests/data/openapi/read_only_write_only_double_collision.yaml 0.45.0-1/tests/data/openapi/read_only_write_only_double_collision.yaml
--- 0.26.4-3/tests/data/openapi/read_only_write_only_double_collision.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/read_only_write_only_double_collision.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,31 @@
+openapi: "3.0.0"
+info:
+  title: Read Only Write Only Double Collision Test API
+  version: "1.0"
+paths: {}
+components:
+  schemas:
+    UserRequest:
+      type: object
+      properties:
+        existing_field:
+          type: string
+    UserRequestModel:
+      type: object
+      properties:
+        another_field:
+          type: string
+    User:
+      type: object
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          readOnly: true
+        name:
+          type: string
+        password:
+          type: string
+          writeOnly: true
diff -pruN 0.26.4-3/tests/data/openapi/read_only_write_only_duplicate_allof_ref.yaml 0.45.0-1/tests/data/openapi/read_only_write_only_duplicate_allof_ref.yaml
--- 0.26.4-3/tests/data/openapi/read_only_write_only_duplicate_allof_ref.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/read_only_write_only_duplicate_allof_ref.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,34 @@
+openapi: "3.0.0"
+info:
+  title: Read Only Write Only Duplicate AllOf Ref Test API
+  version: "1.0"
+paths: {}
+components:
+  schemas:
+    Base:
+      type: object
+      properties:
+        id:
+          type: integer
+          readOnly: true
+        name:
+          type: string
+    # Parent has allOf with duplicate $ref - when loaded via $ref, this triggers line 750
+    Parent:
+      type: object
+      allOf:
+        - $ref: "#/components/schemas/Base"
+        # Duplicate reference within a loaded schema - this triggers the continue at line 750
+        - $ref: "#/components/schemas/Base"
+      properties:
+        parent_field:
+          type: string
+    # Child references Parent which has duplicate allOf refs
+    Child:
+      type: object
+      allOf:
+        - $ref: "#/components/schemas/Parent"
+      properties:
+        extra:
+          type: string
+          writeOnly: true
diff -pruN 0.26.4-3/tests/data/openapi/read_only_write_only_empty_base.yaml 0.45.0-1/tests/data/openapi/read_only_write_only_empty_base.yaml
--- 0.26.4-3/tests/data/openapi/read_only_write_only_empty_base.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/read_only_write_only_empty_base.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,30 @@
+openapi: "3.0.0"
+info:
+  title: Read Only Write Only Empty Base Test API
+  version: "1.0"
+paths: {}
+components:
+  schemas:
+    # Empty base class (no properties)
+    EmptyBase:
+      type: object
+      # No properties - this creates a model with no fields
+    # HasFieldBase has a field
+    HasFieldBase:
+      type: object
+      properties:
+        base_field:
+          type: string
+    # Child inherits from EmptyBase first, then uses required from HasFieldBase
+    # The required field lookup will check EmptyBase first (no fields)
+    Child:
+      type: object
+      allOf:
+        - $ref: "#/components/schemas/EmptyBase"
+        - $ref: "#/components/schemas/HasFieldBase"
+        - required:
+            - base_field
+      properties:
+        id:
+          type: integer
+          readOnly: true
diff -pruN 0.26.4-3/tests/data/openapi/read_only_write_only_mixed.yaml 0.45.0-1/tests/data/openapi/read_only_write_only_mixed.yaml
--- 0.26.4-3/tests/data/openapi/read_only_write_only_mixed.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/read_only_write_only_mixed.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,37 @@
+openapi: "3.0.0"
+info:
+  title: Mixed Schema Test
+  version: "1.0"
+paths: {}
+components:
+  schemas:
+    User:
+      type: object
+      required:
+        - name
+      properties:
+        id:
+          type: integer
+          readOnly: true
+        name:
+          type: string
+        password:
+          type: string
+          writeOnly: true
+    Address:
+      type: object
+      required:
+        - city
+      properties:
+        street:
+          type: string
+        city:
+          type: string
+    Product:
+      type: object
+      properties:
+        id:
+          type: integer
+          readOnly: true
+        name:
+          type: string
diff -pruN 0.26.4-3/tests/data/openapi/read_only_write_only_nested_allof.yaml 0.45.0-1/tests/data/openapi/read_only_write_only_nested_allof.yaml
--- 0.26.4-3/tests/data/openapi/read_only_write_only_nested_allof.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/read_only_write_only_nested_allof.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,35 @@
+openapi: "3.0.0"
+info:
+  title: Read Only Write Only Nested AllOf Test API
+  version: "1.0"
+paths: {}
+components:
+  schemas:
+    BaseTimestamps:
+      type: object
+      properties:
+        created_at:
+          type: string
+          format: date-time
+          readOnly: true
+    ExtendedTimestamps:
+      allOf:
+        - $ref: "#/components/schemas/BaseTimestamps"
+        - type: object
+          properties:
+            updated_at:
+              type: string
+              format: date-time
+              readOnly: true
+    User:
+      allOf:
+        - $ref: "#/components/schemas/ExtendedTimestamps"
+        - type: object
+          required:
+            - name
+          properties:
+            name:
+              type: string
+            password:
+              type: string
+              writeOnly: true
diff -pruN 0.26.4-3/tests/data/openapi/read_only_write_only_nested_allof_order.yaml 0.45.0-1/tests/data/openapi/read_only_write_only_nested_allof_order.yaml
--- 0.26.4-3/tests/data/openapi/read_only_write_only_nested_allof_order.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/read_only_write_only_nested_allof_order.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,37 @@
+openapi: "3.0.0"
+info:
+  title: Read Only Write Only Nested AllOf Order Test API
+  version: "1.0"
+paths: {}
+components:
+  schemas:
+    # Child is listed first but references Parent via allOf
+    Child:
+      type: object
+      allOf:
+        - $ref: "#/components/schemas/Parent"
+      properties:
+        child_field:
+          type: string
+    # Parent has allOf referencing GrandParent (to test nested allOf resolution)
+    Parent:
+      type: object
+      allOf:
+        - $ref: "#/components/schemas/GrandParent"
+        - type: object
+          properties:
+            parent_field:
+              type: string
+              writeOnly: true
+    # GrandParent has readOnly fields
+    GrandParent:
+      type: object
+      required:
+        - id
+      properties:
+        id:
+          type: integer
+        created_at:
+          type: string
+          format: date-time
+          readOnly: true
diff -pruN 0.26.4-3/tests/data/openapi/read_only_write_only_ref.yaml 0.45.0-1/tests/data/openapi/read_only_write_only_ref.yaml
--- 0.26.4-3/tests/data/openapi/read_only_write_only_ref.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/read_only_write_only_ref.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,25 @@
+openapi: "3.0.0"
+info:
+  title: Read Only Write Only Ref Test API
+  version: "1.0"
+paths: {}
+components:
+  schemas:
+    ReadOnlyId:
+      type: integer
+      readOnly: true
+    WriteOnlySecret:
+      type: string
+      writeOnly: true
+    User:
+      type: object
+      required:
+        - id
+        - name
+      properties:
+        id:
+          $ref: "#/components/schemas/ReadOnlyId"
+        name:
+          type: string
+        secret:
+          $ref: "#/components/schemas/WriteOnlySecret"
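
Per OpenAPI 3.0 semantics, readOnly properties belong only to responses and writeOnly properties only to requests. A hand-written sketch of the request/response split the User schema above implies (illustrative; the generator's actual naming and output may differ):

    # Sketch only: the request model drops the readOnly id, the response
    # model drops the writeOnly secret.
    from typing import Optional

    from pydantic import BaseModel


    class UserRequest(BaseModel):
        name: str
        secret: Optional[str] = None  # writeOnly: clients may send it


    class UserResponse(BaseModel):
        id: int  # readOnly: server-assigned
        name: str
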
diff -pruN 0.26.4-3/tests/data/openapi/read_only_write_only_ref_with_desc.yaml 0.45.0-1/tests/data/openapi/read_only_write_only_ref_with_desc.yaml
--- 0.26.4-3/tests/data/openapi/read_only_write_only_ref_with_desc.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/read_only_write_only_ref_with_desc.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,39 @@
+openapi: "3.0.0"
+info:
+  title: Read Only Write Only Ref with Description Test API
+  version: "1.0"
+paths: {}
+components:
+  schemas:
+    Address:
+      type: object
+      properties:
+        street:
+          type: string
+        city:
+          type: string
+          readOnly: true
+    Base:
+      type: object
+      properties:
+        base_id:
+          type: integer
+    # User with allOf and properties containing $ref + description
+    # This triggers _parse_object_common_part with obj.properties
+    User:
+      type: object
+      allOf:
+        - $ref: "#/components/schemas/Base"
+      required:
+        - name
+      properties:
+        name:
+          type: string
+        # Property with $ref AND description - makes it a JsonSchemaObject with ref
+        home_address:
+          $ref: "#/components/schemas/Address"
+          description: "User's home address"
+        # Property with $ref AND readOnly
+        work_address:
+          $ref: "#/components/schemas/Address"
+          readOnly: true
diff -pruN 0.26.4-3/tests/data/openapi/read_only_write_only_shared_base_ref.yaml 0.45.0-1/tests/data/openapi/read_only_write_only_shared_base_ref.yaml
--- 0.26.4-3/tests/data/openapi/read_only_write_only_shared_base_ref.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/read_only_write_only_shared_base_ref.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,43 @@
+openapi: "3.0.0"
+info:
+  title: Read Only Write Only Shared Base Ref Test API
+  version: "1.0"
+paths: {}
+components:
+  schemas:
+    # Shared base that will be referenced by multiple parents
+    SharedBase:
+      type: object
+      properties:
+        shared_id:
+          type: integer
+          readOnly: true
+        shared_name:
+          type: string
+    # First parent references SharedBase
+    Parent1:
+      type: object
+      allOf:
+        - $ref: "#/components/schemas/SharedBase"
+      properties:
+        parent1_field:
+          type: string
+    # Second parent also references SharedBase
+    Parent2:
+      type: object
+      allOf:
+        - $ref: "#/components/schemas/SharedBase"
+      properties:
+        parent2_field:
+          type: string
+          writeOnly: true
+    # Child inherits from both Parent1 and Parent2
+    # When collecting fields, SharedBase path will be visited twice
+    Child:
+      type: object
+      allOf:
+        - $ref: "#/components/schemas/Parent1"
+        - $ref: "#/components/schemas/Parent2"
+      properties:
+        child_field:
+          type: string
diff -pruN 0.26.4-3/tests/data/openapi/read_only_write_only_union.yaml 0.45.0-1/tests/data/openapi/read_only_write_only_union.yaml
--- 0.26.4-3/tests/data/openapi/read_only_write_only_union.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/read_only_write_only_union.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,23 @@
+openapi: "3.0.0"
+info:
+  title: Read Only Write Only Union Test API
+  version: "1.0"
+paths: {}
+components:
+  schemas:
+    User:
+      type: object
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          readOnly: true
+        name:
+          type: string
+        metadata:
+          oneOf:
+            - type: string
+            - type: integer
+          writeOnly: true
diff -pruN 0.26.4-3/tests/data/openapi/read_only_write_only_url_ref.yaml 0.45.0-1/tests/data/openapi/read_only_write_only_url_ref.yaml
--- 0.26.4-3/tests/data/openapi/read_only_write_only_url_ref.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/read_only_write_only_url_ref.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,18 @@
+openapi: "3.0.0"
+info:
+  title: Read Only Write Only URL Ref Test API
+  version: "1.0"
+paths: {}
+components:
+  schemas:
+    User:
+      type: object
+      required:
+        - name
+      properties:
+        name:
+          type: string
+        created_at:
+          $ref: "https://example.com/common.yaml#/components/schemas/ReadOnlyTimestamp"
+        secret:
+          $ref: "https://example.com/common.yaml#/components/schemas/WriteOnlySecret"
diff -pruN 0.26.4-3/tests/data/openapi/read_only_write_only_url_ref_remote.yaml 0.45.0-1/tests/data/openapi/read_only_write_only_url_ref_remote.yaml
--- 0.26.4-3/tests/data/openapi/read_only_write_only_url_ref_remote.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/read_only_write_only_url_ref_remote.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+openapi: "3.0.0"
+info:
+  title: Common Schemas
+  version: "1.0"
+paths: {}
+components:
+  schemas:
+    ReadOnlyTimestamp:
+      type: string
+      format: date-time
+      readOnly: true
+    WriteOnlySecret:
+      type: string
+      writeOnly: true
diff -pruN 0.26.4-3/tests/data/openapi/ref_nullable.yaml 0.45.0-1/tests/data/openapi/ref_nullable.yaml
--- 0.26.4-3/tests/data/openapi/ref_nullable.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/ref_nullable.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,28 @@
+openapi: 3.0.0
+info:
+  title: Test API
+  version: 1.0.0
+paths: {}
+components:
+  schemas:
+    NullableChild:
+      nullable: true
+      type: object
+      properties:
+        name:
+          type: string
+    NonNullableChild:
+      type: object
+      properties:
+        name:
+          type: string
+    Parent:
+      type: object
+      required:
+        - nullableChild
+        - nonNullableChild
+      properties:
+        nullableChild:
+          $ref: "#/components/schemas/NullableChild"
+        nonNullableChild:
+          $ref: "#/components/schemas/NonNullableChild"
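
This fixture exercises the rule that a $ref to a schema marked nullable should yield an Optional annotation even when the field itself is required. An illustrative (not verbatim) pydantic shape:

    from typing import Optional

    from pydantic import BaseModel


    class NullableChild(BaseModel):
        name: Optional[str] = None


    class NonNullableChild(BaseModel):
        name: Optional[str] = None


    class Parent(BaseModel):
        nullableChild: Optional[NullableChild]  # required, but may be null
        nonNullableChild: NonNullableChild  # required and non-null
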
diff -pruN 0.26.4-3/tests/data/openapi/reference_same_hierarchy_directory/common/cat.yaml 0.45.0-1/tests/data/openapi/reference_same_hierarchy_directory/common/cat.yaml
--- 0.26.4-3/tests/data/openapi/reference_same_hierarchy_directory/common/cat.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/reference_same_hierarchy_directory/common/cat.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+# ./common/cat.yaml
+CatInfo:
+  type: object
+  required:
+    - cat_id
+  properties:
+    cat_id:
+      type: string
+      description: ID of this cat
+    details:
+      $ref: "#/CatDetails"
+
+CatDetails:
+  type: object
+  required:
+    - name
+    - birthYear
+  properties:
+    name:
+      type: string
+      description: Name of this cat
+    birthYear:
+      type: number
+      description: Year of this cat's birth
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/reference_same_hierarchy_directory/public/entities.yaml 0.45.0-1/tests/data/openapi/reference_same_hierarchy_directory/public/entities.yaml
--- 0.26.4-3/tests/data/openapi/reference_same_hierarchy_directory/public/entities.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/reference_same_hierarchy_directory/public/entities.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,9 @@
+# ./public/entities.yaml
+openapi: 3.0.3
+info:
+  title: "Entity Schemas"
+paths: {}
+components:
+  schemas:
+    CatInfo:
+      $ref: "../common/cat.yaml#/CatInfo"
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/reference_to_object_properties.yaml 0.45.0-1/tests/data/openapi/reference_to_object_properties.yaml
--- 0.26.4-3/tests/data/openapi/reference_to_object_properties.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/reference_to_object_properties.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,38 @@
+openapi: "3.0.3"
+info:
+  title: Example
+  version: 1.0.0
+
+paths: {}
+
+components:
+  schemas:
+    Id:
+      type: string
+    Parent:
+      type: object
+      properties:
+        id:
+          $ref: "#/components/schemas/Id"
+        name:
+          type: string
+        pet:
+          $ref: "#/components/schemas/Pet"
+    Child:
+      type: object
+      properties:
+        id:
+          $ref: "#/components/schemas/Id"
+        parent_id:
+          $ref: "#/components/schemas/Parent/properties/id"
+        name:
+          type: string
+        pet:
+          $ref: "#/components/schemas/Parent/properties/pet"
+    Pet:
+      type: object
+      properties:
+        name:
+          type: string
+        age:
+          type: integer
diff -pruN 0.26.4-3/tests/data/openapi/referenced_default.yaml 0.45.0-1/tests/data/openapi/referenced_default.yaml
--- 0.26.4-3/tests/data/openapi/referenced_default.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/referenced_default.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,22 @@
+openapi: 3.1.0
+info:
+  title: Title
+  description: Title
+  version: 1.0.0
+components:
+  schemas:
+    Model:
+      type: "object"
+      properties:
+        settingA:
+          type: "number"
+          default: 5
+          minimum: 0
+          maximum: 10
+        settingB:
+          $ref: "#/components/schemas/ModelSettingB"
+    ModelSettingB:
+      type: "number"
+      default: 5
+      minimum: 0
+      maximum: 10
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/refs.yaml 0.45.0-1/tests/data/openapi/refs.yaml
--- 0.26.4-3/tests/data/openapi/refs.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/refs.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,5 @@
+openapi: 3.0.1
+components:
+  schemas:
+    Problem:
+      $ref: "https://teamdigitale.github.io/openapi/0.0.6/definitions.yaml#/schemas/Problem"
diff -pruN 0.26.4-3/tests/data/openapi/required_null.yaml 0.45.0-1/tests/data/openapi/required_null.yaml
--- 0.26.4-3/tests/data/openapi/required_null.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/required_null.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,18 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    ObjectBase:
+      description: Object schema
+      type: object
+      properties:
+        name:
+          description: Name of the object
+          type: string
+        type:
+          description: Object type
+          type: string
+          enum:
+            - my_first_object
+            - my_second_object
+            - my_third_object
+      required: null
diff -pruN 0.26.4-3/tests/data/openapi/resolved_models.yaml 0.45.0-1/tests/data/openapi/resolved_models.yaml
--- 0.26.4-3/tests/data/openapi/resolved_models.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/resolved_models.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,82 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      parameters:
+        - name: limit
+          in: query
+          description: How many items to return at one time (max 100)
+          required: false
+          schema:
+            type: integer
+            format: int32
+      responses:
+        '200':
+          description: A paged array of pets
+          headers:
+            x-next:
+              description: A link to the next page of responses
+              schema:
+                type: string
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Pets"
+        default:
+          description: unexpected error
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Error"
+                x-amazon-apigateway-integration:
+                  uri:
+                    Fn::Sub: arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${PythonVersionFunction.Arn}/invocations
+                  passthroughBehavior: when_no_templates
+                  httpMethod: POST
+                  type: aws_proxy
+components:
+  schemas:
+    Pet:
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    Pets:
+      type: array
+      items:
+        $ref: "#/components/schemas/Pet"
+    Error:
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+          format: int32
+        message:
+          type: string
+    Resolved:
+      type: object
+      properties:
+        resolved:
+          type: array
+          items:
+            type: string
diff -pruN 0.26.4-3/tests/data/openapi/root_model.yaml 0.45.0-1/tests/data/openapi/root_model.yaml
--- 0.26.4-3/tests/data/openapi/root_model.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/root_model.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+TestNested:
+  type: object
+  properties:
+    test_string:
+      type: string
+    nested_nested:
+      $ref: "#/TestNestedNested"
+TestNestedNested:
+  type: object
+  properties:
+    test_nested_nested_string:
+      type: string
+Test:
+  allOf:
+    - $ref: "#/TestNested"
diff -pruN 0.26.4-3/tests/data/openapi/same_name_objects.yaml 0.45.0-1/tests/data/openapi/same_name_objects.yaml
--- 0.26.4-3/tests/data/openapi/same_name_objects.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/same_name_objects.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,15 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+components:
+  schemas:
+    Pets:
+      type: object
+      additionalProperties: false
+    Friends1:
+      $ref: "resolved_models.yaml#/components/schemas/Pets"
+    Friends2:
+      $ref: "resolved_models.yaml#/components/schemas/Pets"
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/serialize_as_any.yaml 0.45.0-1/tests/data/openapi/serialize_as_any.yaml
--- 0.26.4-3/tests/data/openapi/serialize_as_any.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/serialize_as_any.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,49 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: SerializeAsAny Test
+  description: Test schema for SerializeAsAny annotation on types with subtypes
+paths: {}
+components:
+  schemas:
+    User:
+      type: object
+      description: Base user model
+      properties:
+        name:
+          type: string
+          description: User's name
+      required:
+        - name
+
+    AdminUser:
+      allOf:
+        - $ref: '#/components/schemas/User'
+        - type: object
+          description: Admin user with additional permissions
+          properties:
+            admin_level:
+              type: integer
+              description: Admin permission level
+          required:
+            - admin_level
+
+    Container:
+      type: object
+      description: Container that holds user references
+      properties:
+        admin_user_field:
+          $ref: '#/components/schemas/AdminUser'
+          description: Field that should not use SerializeAsAny
+        user_field:
+          $ref: '#/components/schemas/User'
+          description: Field that should use SerializeAsAny
+        user_list:
+          type: array
+          description: List of users that should use SerializeAsAny
+          items:
+            $ref: '#/components/schemas/User'
+      required:
+        - user_field
+        - user_list
+        - admin_user_field
diff -pruN 0.26.4-3/tests/data/openapi/shadowed_imports.yaml 0.45.0-1/tests/data/openapi/shadowed_imports.yaml
--- 0.26.4-3/tests/data/openapi/shadowed_imports.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/shadowed_imports.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,32 @@
+openapi: 3.0.0
+info:
+  title: REST API
+  version: 0.0.1
+servers:
+  - url: https://api.something.com/1
+components:
+  schemas:
+    marketingOptIn:
+      type: object
+      properties:
+        optedIn:
+          type: boolean
+          example: false
+        date:
+          type: string
+          format: date
+          example: '2018-04-26T17:03:25.155Z'
+paths:
+  /actions/:
+    get:
+      parameters:
+        - name: due
+          in: query
+          description: A due date for the card
+          required: false
+          schema:
+            type: string
+            format: date
+      responses:
+        '200':
+          description: Success
diff -pruN 0.26.4-3/tests/data/openapi/special_yaml_keywords.yaml 0.45.0-1/tests/data/openapi/special_yaml_keywords.yaml
--- 0.26.4-3/tests/data/openapi/special_yaml_keywords.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/special_yaml_keywords.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,44 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Swagger Petstore
+  license:
+    name: MIT
+servers:
+  - url: http://petstore.swagger.io/v1
+paths:
+  /pets:
+    get:
+      summary: List all pets
+      operationId: listPets
+      tags:
+        - pets
+      responses:
+        '200':
+          description: A paged array of pets
+components:
+  schemas:
+    None:
+      type: object
+    "false":
+      type: object
+    "True":
+      type: object
+    "on":
+      type: object
+    NestedKeywords:
+      type: object
+      properties:
+        None:
+          $ref: "#/components/schemas/None"
+        "false":
+          $ref: "#/components/schemas/false"
+        "True":
+          $ref: "#/components/schemas/True"
+        "on":
+          $ref: "#/components/schemas/on"
+      required:
+        - None
+        - "false"
+        - "True"
+        - "on"
\ No newline at end of file
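
The quoting in this fixture matters because YAML 1.1 loaders coerce bare on/True/false (and similar tokens) to booleans, which would destroy the schema names. A quick demonstration with PyYAML:

    import yaml

    print(yaml.safe_load("on: 1"))    # {True: 1} - bare `on` becomes a boolean key
    print(yaml.safe_load('"on": 1'))  # {'on': 1} - quoting preserves the string
    print(yaml.safe_load("None: 1"))  # {'None': 1} - `None` is not a YAML keyword (null is)
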
diff -pruN 0.26.4-3/tests/data/openapi/strict_types_field_constraints.yaml 0.45.0-1/tests/data/openapi/strict_types_field_constraints.yaml
--- 0.26.4-3/tests/data/openapi/strict_types_field_constraints.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/strict_types_field_constraints.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+openapi: 3.0.0
+info:
+  title: Test API
+  version: 1.0.0
+paths: {}
+components:
+  schemas:
+    Timestamp:
+      type: integer
+      minimum: 1
+      maximum: 9999999999
+    Score:
+      type: number
+      minimum: 0.0
+      maximum: 100.0
+    Name:
+      type: string
+      minLength: 1
+      maxLength: 100
diff -pruN 0.26.4-3/tests/data/openapi/subclass_enum.json 0.45.0-1/tests/data/openapi/subclass_enum.json
--- 0.26.4-3/tests/data/openapi/subclass_enum.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/subclass_enum.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,37 @@
+{
+  "openapi": "3.0.2",
+  "components": {
+    "schemas": {
+      "ProcessingStatus": {
+        "title": "ProcessingStatus",
+        "enum": [
+          "COMPLETED",
+          "PENDING",
+          "FAILED"
+        ],
+        "type": "string",
+        "description": "The processing status"
+      },
+      "ProcessingTask": {
+        "title": "ProcessingTask",
+        "type": "object",
+        "properties": {
+          "processing_status": {
+            "title": "Status of the task",
+            "allOf": [
+              {
+                "$ref": "#/components/schemas/ProcessingStatus"
+              }
+            ],
+            "default": "COMPLETED"
+          }
+        }
+      }
+    }
+  },
+  "info": {
+    "title": "",
+    "version": ""
+  },
+  "paths": {}
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/type_alias.yaml 0.45.0-1/tests/data/openapi/type_alias.yaml
--- 0.26.4-3/tests/data/openapi/type_alias.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/type_alias.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,32 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: TypeAlias Test
+components:
+  schemas:
+    SimpleString:
+      type: string
+    UnionType:
+      anyOf:
+        - type: string
+        - type: integer
+    ArrayType:
+      type: array
+      items:
+        type: string
+    AnnotatedType:
+      title: MyAnnotatedType
+      anyOf:
+        - type: string
+        - type: boolean
+    ModelWithTypeAliasField:
+      type: object
+      properties:
+        simple_field:
+          $ref: "#/components/schemas/SimpleString"
+        union_field:
+          $ref: "#/components/schemas/UnionType"
+        array_field:
+          $ref: "#/components/schemas/ArrayType"
+        annotated_field:
+          $ref: "#/components/schemas/AnnotatedType"
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/type_alias_cross_module_collision/a.yaml 0.45.0-1/tests/data/openapi/type_alias_cross_module_collision/a.yaml
--- 0.26.4-3/tests/data/openapi/type_alias_cross_module_collision/a.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/type_alias_cross_module_collision/a.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,8 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Module A
+components:
+  schemas:
+    Item:
+      type: string
diff -pruN 0.26.4-3/tests/data/openapi/type_alias_cross_module_collision/b.yaml 0.45.0-1/tests/data/openapi/type_alias_cross_module_collision/b.yaml
--- 0.26.4-3/tests/data/openapi/type_alias_cross_module_collision/b.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/type_alias_cross_module_collision/b.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,11 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: Module B
+components:
+  schemas:
+    Item:
+      oneOf:
+        - type: array
+          items:
+            $ref: "#/components/schemas/Item"
diff -pruN 0.26.4-3/tests/data/openapi/type_alias_forward_ref_multiple.yaml 0.45.0-1/tests/data/openapi/type_alias_forward_ref_multiple.yaml
--- 0.26.4-3/tests/data/openapi/type_alias_forward_ref_multiple.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/type_alias_forward_ref_multiple.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,21 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: TypeAlias Forward Ref Multiple
+components:
+  schemas:
+    RegularModel:
+      type: object
+      properties:
+        name:
+          type: string
+    Third:
+      type: string
+    Second:
+      oneOf:
+        - $ref: "#/components/schemas/First"
+        - $ref: "#/components/schemas/Third"
+        - $ref: "#/components/schemas/RegularModel"
+    First:
+      oneOf:
+        - $ref: "#/components/schemas/Second"
diff -pruN 0.26.4-3/tests/data/openapi/type_alias_mutual_recursive.yaml 0.45.0-1/tests/data/openapi/type_alias_mutual_recursive.yaml
--- 0.26.4-3/tests/data/openapi/type_alias_mutual_recursive.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/type_alias_mutual_recursive.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,14 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: TypeAlias Mutual Recursive Test
+components:
+  schemas:
+    NodeA:
+      oneOf:
+        - type: integer
+        - $ref: "#/components/schemas/NodeB"
+    NodeB:
+      oneOf:
+        - type: string
+        - $ref: "#/components/schemas/NodeA"
diff -pruN 0.26.4-3/tests/data/openapi/type_alias_recursive.yaml 0.45.0-1/tests/data/openapi/type_alias_recursive.yaml
--- 0.26.4-3/tests/data/openapi/type_alias_recursive.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/type_alias_recursive.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,79 @@
+openapi: "3.0.0"
+info:
+  version: 1.0.0
+  title: TypeAlias Test
+  description: |-
+    Test cases for recursive models
+components:
+  schemas:
+
+    # ----------------------------------------------------------------
+    # Simple self-referential recursive types
+    # ----------------------------------------------------------------
+
+    File:
+      type: object
+      required:
+        - path
+      properties:
+        path:
+          type: string
+
+    Folder:
+      type: object
+      required:
+        - files
+        - folders
+      properties:
+        address:
+          type: string
+        files:
+          type: array
+          items:
+            $ref: "#/components/schemas/File"
+        subfolders:
+          type: array
+          items:
+            $ref: "#/components/schemas/Folder"
+
+    ElementaryType:
+      nullable: true
+      oneOf:
+        - type: boolean
+        - type: string
+        - type: integer
+        - type: number
+
+    JsonType:
+      oneOf:
+        - $ref: "#/components/schemas/ElementaryType"
+        - type: array
+          items:
+            $ref: "#/components/schemas/JsonType"
+        - type: object
+          additionalProperties:
+            $ref: "#/components/schemas/JsonType"
+
+    # ----------------------------------------------------------------
+    # Binary recursive types
+    # ----------------------------------------------------------------
+
+    Space:
+      type: object
+      properties:
+        label:
+          type: string
+        data:
+          $ref: "#/components/schemas/JsonType"
+        dual:
+          $ref: "#/components/schemas/DualSpace"
+
+    DualSpace:
+      type: object
+      properties:
+        label:
+          type: string
+        data:
+          $ref: "#/components/schemas/JsonType"
+        predual:
+          $ref: "#/components/schemas/Space"
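
For reference, the recursive aliases the JsonType schema above describes correspond to the following hand-written Python (illustrative; the generator may spell these differently, e.g. as root models or annotated aliases):

    from typing import Dict, List, Optional, Union

    ElementaryType = Optional[Union[bool, str, int, float]]
    # Self-referential alias via forward references; models that use it
    # must resolve/rebuild forward refs before validation.
    JsonType = Union[ElementaryType, List["JsonType"], Dict[str, "JsonType"]]
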
diff -pruN 0.26.4-3/tests/data/openapi/union_default_object.yaml 0.45.0-1/tests/data/openapi/union_default_object.yaml
--- 0.26.4-3/tests/data/openapi/union_default_object.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/union_default_object.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,47 @@
+openapi: 3.1.0
+info:
+  title: Union Default Object Test
+  version: 0.1.0
+servers:
+- url: http://example.com
+paths:
+  /test:
+    get:
+      responses:
+        '200':
+          description: OK
+components:
+  schemas:
+    Interval:
+      type: object
+      properties:
+        start:
+          type: integer
+        end:
+          type: integer
+    Container:
+      type: object
+      properties:
+        # Union[Interval, str] with dict default - should use default_factory
+        interval_or_string:
+          anyOf:
+            - $ref: '#/components/schemas/Interval'
+            - type: string
+          default:
+            start: 2009
+            end: 2019
+        # Union[Interval, str] with string default - should NOT use default_factory
+        string_or_interval:
+          anyOf:
+            - $ref: '#/components/schemas/Interval'
+            - type: string
+          default: "some string value"
+        # Union[Dict, Interval] with dict default - should NOT use default_factory (dict arm)
+        dict_or_interval:
+          anyOf:
+            - type: object
+              additionalProperties:
+                type: string
+            - $ref: '#/components/schemas/Interval'
+          default:
+            key: "value"
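
A sketch of the behaviour the inline comments describe (illustrative, not verbatim output): a mutable default that matches a model arm of the union needs default_factory, while a plain string default, or a dict default matching a plain Dict arm, can be emitted directly:

    from typing import Dict, Optional, Union

    from pydantic import BaseModel, Field


    class Interval(BaseModel):
        start: Optional[int] = None
        end: Optional[int] = None


    class Container(BaseModel):
        # dict default matching the Interval arm -> default_factory
        interval_or_string: Union[Interval, str] = Field(
            default_factory=lambda: {"start": 2009, "end": 2019}
        )
        # plain string default -> no factory needed
        string_or_interval: Union[Interval, str] = "some string value"
        # dict default matching the Dict arm -> plain default
        dict_or_interval: Union[Dict[str, str], Interval] = {"key": "value"}
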
diff -pruN 0.26.4-3/tests/data/openapi/unique_items_default_set.yaml 0.45.0-1/tests/data/openapi/unique_items_default_set.yaml
--- 0.26.4-3/tests/data/openapi/unique_items_default_set.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/unique_items_default_set.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,33 @@
+openapi: "3.0.0"
+info:
+  title: Test API - Unique Items Default as Set
+  version: 1.0.0
+paths: {}
+components:
+  schemas:
+    TestModel:
+      type: object
+      properties:
+        tags:
+          type: array
+          items:
+            type: string
+          uniqueItems: true
+          default:
+            - tag1
+            - tag2
+        empty_tags:
+          type: array
+          items:
+            type: string
+          uniqueItems: true
+          default: []
+        numbers:
+          type: array
+          items:
+            type: integer
+          uniqueItems: true
+          default:
+            - 1
+            - 2
+            - 3
diff -pruN 0.26.4-3/tests/data/openapi/unquoted_null.yaml 0.45.0-1/tests/data/openapi/unquoted_null.yaml
--- 0.26.4-3/tests/data/openapi/unquoted_null.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/unquoted_null.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+# Test behavior with `type: null` when `null` is unquoted
+
+openapi: "3.1.1"
+components:
+  schemas:
+    Thing:
+      type: object
+      required:
+        - value
+      properties:
+        value:
+          type:
+            - string
+            - null
+    NullThing:
+      type: null
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/unsorted_optional_fields.yaml 0.45.0-1/tests/data/openapi/unsorted_optional_fields.yaml
--- 0.26.4-3/tests/data/openapi/unsorted_optional_fields.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/unsorted_optional_fields.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+openapi: 3.0.3
+info:
+  title: Title
+  description: Title
+  version: 1.0.0
+servers:
+  - url: 'https'
+paths: {}
+components:
+  schemas:
+    Note:
+      type: object
+      required:
+        - text
+      properties:
+        author:
+          type: string
+        text:
+          type: string
diff -pruN 0.26.4-3/tests/data/openapi/webhooks.yaml 0.45.0-1/tests/data/openapi/webhooks.yaml
--- 0.26.4-3/tests/data/openapi/webhooks.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/webhooks.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,52 @@
+openapi: 3.1.0
+info:
+  title: Webhook Test API
+  version: 1.0.0
+webhooks:
+  pet.new:
+    post:
+      requestBody:
+        description: Information about a new pet in the system
+        content:
+          application/json:
+            schema:
+              $ref: "#/components/schemas/Pet"
+      responses:
+        '200':
+          description: Return a 200 status to indicate that the data was received successfully
+  pet.updated:
+    post:
+      requestBody:
+        description: Information about an updated pet
+        content:
+          application/json:
+            schema:
+              $ref: "#/components/schemas/PetUpdate"
+      responses:
+        '200':
+          description: Return a 200 status to indicate that the data was received successfully
+components:
+  schemas:
+    Pet:
+      type: object
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
+    PetUpdate:
+      type: object
+      properties:
+        id:
+          type: integer
+          format: int64
+        name:
+          type: string
+        tag:
+          type: string
\ No newline at end of file
diff -pruN 0.26.4-3/tests/data/openapi/webhooks_ref_with_external_schema/openapi.yaml 0.45.0-1/tests/data/openapi/webhooks_ref_with_external_schema/openapi.yaml
--- 0.26.4-3/tests/data/openapi/webhooks_ref_with_external_schema/openapi.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/webhooks_ref_with_external_schema/openapi.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+openapi: "3.1.0"
+info:
+  version: 1.0.0
+  title: Webhook Test API
+webhooks:
+  newPet:
+    $ref: "./webhooks/pet_webhook.yaml#/newPet"
+components:
+  schemas:
+    Error:
+      type: object
+      required:
+        - code
+        - message
+      properties:
+        code:
+          type: integer
+        message:
+          type: string
diff -pruN 0.26.4-3/tests/data/openapi/webhooks_ref_with_external_schema/webhooks/model.yaml 0.45.0-1/tests/data/openapi/webhooks_ref_with_external_schema/webhooks/model.yaml
--- 0.26.4-3/tests/data/openapi/webhooks_ref_with_external_schema/webhooks/model.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/webhooks_ref_with_external_schema/webhooks/model.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+Pet:
+  type: object
+  required:
+    - id
+    - name
+  properties:
+    id:
+      type: integer
+    name:
+      type: string
diff -pruN 0.26.4-3/tests/data/openapi/webhooks_ref_with_external_schema/webhooks/pet_webhook.yaml 0.45.0-1/tests/data/openapi/webhooks_ref_with_external_schema/webhooks/pet_webhook.yaml
--- 0.26.4-3/tests/data/openapi/webhooks_ref_with_external_schema/webhooks/pet_webhook.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/webhooks_ref_with_external_schema/webhooks/pet_webhook.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,12 @@
+newPet:
+  post:
+    summary: New pet webhook
+    operationId: newPetWebhook
+    requestBody:
+      content:
+        application/json:
+          schema:
+            $ref: "./model.yaml#/Pet"
+    responses:
+      '200':
+        description: Success
diff -pruN 0.26.4-3/tests/data/openapi/webhooks_with_parameters.yaml 0.45.0-1/tests/data/openapi/webhooks_with_parameters.yaml
--- 0.26.4-3/tests/data/openapi/webhooks_with_parameters.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/webhooks_with_parameters.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,55 @@
+openapi: 3.1.0
+info:
+  title: Webhook Test API with Parameters
+  version: 1.0.0
+webhooks:
+  pet.new:
+    parameters:
+      - name: X-Webhook-Id
+        in: query
+        required: true
+        schema:
+          type: string
+    post:
+      parameters:
+        - name: X-Request-Id
+          in: query
+          required: false
+          schema:
+            type: string
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: "#/components/schemas/Pet"
+      responses:
+        '200':
+          description: OK
+  pet.updated:
+    parameters:
+      - name: X-Correlation-Id
+        in: query
+        required: true
+        schema:
+          type: string
+    post:
+      requestBody:
+        content:
+          application/json:
+            schema:
+              $ref: "#/components/schemas/Pet"
+      responses:
+        '200':
+          description: OK
+components:
+  schemas:
+    Pet:
+      type: object
+      required:
+        - id
+        - name
+      properties:
+        id:
+          type: integer
+        name:
+          type: string
diff -pruN 0.26.4-3/tests/data/openapi/x_enum_names.yaml 0.45.0-1/tests/data/openapi/x_enum_names.yaml
--- 0.26.4-3/tests/data/openapi/x_enum_names.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/x_enum_names.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,45 @@
+openapi: 3.0.0
+info:
+  title: Test x-enumNames
+  version: 1.0.0
+paths: {}
+components:
+  schemas:
+    # Integer enum with x-enumNames
+    CustomerColor:
+      type: integer
+      enum: [1, 2, 3, 4, 5]
+      x-enumNames:
+        - BloodOrange
+        - Sunflower
+        - LightGreen
+        - SkyBlue
+        - Purple
+
+    # String enum with x-enumNames
+    StringStatus:
+      type: string
+      enum: ['pending', 'active', 'closed']
+      x-enumNames:
+        - Pending
+        - Active
+        - Closed
+
+    # x-enum-varnames takes priority over x-enumNames
+    PriorityTest:
+      type: integer
+      enum: [1, 2]
+      x-enum-varnames:
+        - VarnameOne
+        - VarnameTwo
+      x-enumNames:
+        - EnumNameOne
+        - EnumNameTwo
+
+    # x-enumNames shorter than enum (graceful fallback)
+    ShortNames:
+      type: integer
+      enum: [1, 2, 3, 4]
+      x-enumNames:
+        - First
+        - Second
diff -pruN 0.26.4-3/tests/data/openapi/x_enum_varnames.yaml 0.45.0-1/tests/data/openapi/x_enum_varnames.yaml
--- 0.26.4-3/tests/data/openapi/x_enum_varnames.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/openapi/x_enum_varnames.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,61 @@
+openapi: "3.0.0"
+components:
+  schemas:
+    string:
+      type: string
+      enum:
+        - 'dog'
+        - 'cat'
+        - 'snake'
+    unknown_type_string:
+      enum:
+        - 'dog'
+        - 'cat'
+        - 'snake'
+    named_string:
+      type: string
+      description: Operator to filter data by.
+      enum:
+        - '='
+        - '!='
+        - '>'
+        - '<'
+        - '>='
+        - '<='
+      x-enum-varnames:
+        - EQ
+        - NE
+        - GT
+        - LT
+        - GE
+        - LE
+      example: '>='
+      xml:
+        attribute: true
+    named_number:
+      type: number
+      description: Operator to filter data by.
+      enum:
+        - 1
+        - 2
+        - 3
+      x-enum-varnames:
+        - one
+        - two
+        - three
+      example: 1
+    number:
+      type: number
+      description: Operator to filter data by.
+      enum:
+        - 1
+        - 2
+        - 3
+      example: 1
+    unknown_type_number:
+      description: Operator to filter data by.
+      enum:
+        - 1
+        - 2
+        - 3
+      example: 1
\ No newline at end of file
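
For the named_string schema above, x-enum-varnames supplies the member names while the enum values stay as declared; an illustrative result:

    from enum import Enum


    class NamedString(Enum):
        EQ = "="
        NE = "!="
        GT = ">"
        LT = "<"
        GE = ">="
        LE = "<="
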
diff -pruN 0.26.4-3/tests/data/project/pyproject.toml 0.45.0-1/tests/data/project/pyproject.toml
--- 0.26.4-3/tests/data/project/pyproject.toml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/project/pyproject.toml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,16 @@
+[tool.black]
+skip-string-normalization = false
+line-length = 30
+
+[tool.datamodel-codegen]
+input = "INPUT_PATH"
+output = "OUTPUT_PATH"
+input_file_type = 'openapi'
+validation = true
+field-constraints = true
+snake-case-field = true
+strip-default-none = true
+target-python-version = "3.9"
+aliases = "ALIASES_PATH"
+extra-template-data = "EXTRA_TEMPLATE_DATA_PATH"
+custom-template-dir = "CUSTOM_TEMPLATE_DIR_PATH"
diff -pruN 0.26.4-3/tests/data/pyproject.toml 0.45.0-1/tests/data/pyproject.toml
--- 0.26.4-3/tests/data/pyproject.toml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/pyproject.toml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,10 @@
+[tool.isort]
+multi_line_output = 3
+include_trailing_comma = true
+force_grid_wrap = 0
+use_parentheses = true
+line_length = 88
+skip = "tests/data"
+
+sections = [ 'FUTURE', 'STDLIB', 'THIRDPARTY', 'FIRSTPARTY', 'LOCALFOLDER' ]
+known_first_party = [ 'custom_module' ]
diff -pruN 0.26.4-3/tests/data/python/custom_formatters/add_comment.py 0.45.0-1/tests/data/python/custom_formatters/add_comment.py
--- 0.26.4-3/tests/data/python/custom_formatters/add_comment.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/python/custom_formatters/add_comment.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+from datamodel_code_generator.format import CustomCodeFormatter
+
+
+class CodeFormatter(CustomCodeFormatter):
+    """Simple correct formatter. Adding a comment to top of code."""
+    def apply(self, code: str) -> str:
+        return f'# a comment\n{code}'
diff -pruN 0.26.4-3/tests/data/python/custom_formatters/add_license.py 0.45.0-1/tests/data/python/custom_formatters/add_license.py
--- 0.26.4-3/tests/data/python/custom_formatters/add_license.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/python/custom_formatters/add_license.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+from typing import Any, Dict
+from pathlib import Path
+
+from datamodel_code_generator.format import CustomCodeFormatter
+
+
+class CodeFormatter(CustomCodeFormatter):
+    """Add a license to file from license file path."""
+
+    def __init__(self, formatter_kwargs: Dict[str, Any]) -> None:
+        super().__init__(formatter_kwargs)
+
+        if "license_file" not in formatter_kwargs:
+            raise ValueError()
+
+        license_file_path = Path(formatter_kwargs["license_file"]).resolve()
+
+        with license_file_path.open("r") as f:
+            license_file = f.read()
+
+        self.license_header = "\n".join([f"# {line}".strip() for line in license_file.split("\n")])
+
+    def apply(self, code: str) -> str:
+        return f"{self.license_header}\n{code}"
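
The formatter contract shown above can also be exercised directly; a minimal sketch (paths are the fixture paths from this diff, run from the repository root):

    from tests.data.python.custom_formatters.add_license import CodeFormatter

    formatter = CodeFormatter(
        {"license_file": "tests/data/python/custom_formatters/license_example.txt"}
    )
    print(formatter.apply("class Model:\n    pass\n"))
    # -> '# MIT License' header comment lines followed by the original code
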
diff -pruN 0.26.4-3/tests/data/python/custom_formatters/license_example.txt 0.45.0-1/tests/data/python/custom_formatters/license_example.txt
--- 0.26.4-3/tests/data/python/custom_formatters/license_example.txt	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/python/custom_formatters/license_example.txt	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+MIT License
+
+Copyright (c) 2023 Blah-blah
diff -pruN 0.26.4-3/tests/data/python/custom_formatters/not_subclass.py 0.45.0-1/tests/data/python/custom_formatters/not_subclass.py
--- 0.26.4-3/tests/data/python/custom_formatters/not_subclass.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/python/custom_formatters/not_subclass.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+class CodeFormatter:
+    """Invalid formatter: is not subclass of `datamodel_code_generator.format.CustomCodeFormatter`."""
+    pass
diff -pruN 0.26.4-3/tests/data/python/custom_formatters/wrong.py 0.45.0-1/tests/data/python/custom_formatters/wrong.py
--- 0.26.4-3/tests/data/python/custom_formatters/wrong.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/python/custom_formatters/wrong.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+from datamodel_code_generator.format import CustomCodeFormatter
+
+
+class WrongFormatterName(CustomCodeFormatter):
+    """Invalid formatter: correct name is CodeFormatter."""
+    def apply(self, code: str) -> str:
+        return f'# a comment\n{code}'
diff -pruN 0.26.4-3/tests/data/python/space_and_special_characters_dict.py 0.45.0-1/tests/data/python/space_and_special_characters_dict.py
--- 0.26.4-3/tests/data/python/space_and_special_characters_dict.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/python/space_and_special_characters_dict.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,26 @@
+{
+    "Serial Number": "A12345678",
+    "Timestamp": "2020-05-26T12:15:25.792741Z",
+    "Data": {
+        "Length (m)": 12.34,
+        "Symmetric deviation (%)": 12.216564148290807,
+        "Total running time (s)": 974,
+        "Mass (kg)": 42.23,
+        "Initial parameters": {
+            "V1": 123,
+            "V2": 456
+        },
+        "class": "Unknown"
+    },
+    "values": {
+        "1 Step": "42",
+        "2 Step": "23"
+    },
+    "recursive": {
+        "sub": {
+            "recursive": {
+                "value": 42.23
+            }
+        }
+    }
+}
diff -pruN 0.26.4-3/tests/data/templates/pydantic/BaseModel.jinja2 0.45.0-1/tests/data/templates/pydantic/BaseModel.jinja2
--- 0.26.4-3/tests/data/templates/pydantic/BaseModel.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/templates/pydantic/BaseModel.jinja2	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+{% for decorator in decorators -%}
+{{ decorator }}
+{% endfor -%}
+class {{ class_name }}({{ base_class }}):{% if comment is defined %}  # {{ comment }}{% endif %}
+{%- if not fields %}
+    pass
+{%- endif %}
+{%- for field in fields -%}
+    {%- if field.required %}
+    {{ field.name }}: {{ field.type_hint }}
+    {%- else %}
+    {{ field.name }}: {{ field.type_hint }} = {{ field.default }}
+    {%- endif %}
+    {%- if field.docstring %}
+    """
+    {{ field.docstring }}
+    """
+    {%- endif %}
+{%- endfor -%}
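
A sketch of pointing a run at the template directory above (assumes the programmatic generate() mirrors the CLI's --custom-template-dir option, which the project pyproject fixture earlier in this diff also sets; the input path is hypothetical):

    from pathlib import Path

    from datamodel_code_generator import InputFileType, generate

    generate(
        Path("tests/data/openapi/api.yaml"),  # hypothetical input
        input_file_type=InputFileType.OpenAPI,
        output=Path("models.py"),
        custom_template_dir=Path("tests/data/templates"),
    )
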
diff -pruN 0.26.4-3/tests/data/templates_old_style/BaseModel.jinja2 0.45.0-1/tests/data/templates_old_style/BaseModel.jinja2
--- 0.26.4-3/tests/data/templates_old_style/BaseModel.jinja2	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/templates_old_style/BaseModel.jinja2	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,19 @@
+{% for decorator in decorators -%}
+{{ decorator }}
+{% endfor -%}
+class {{ class_name }}({{ base_class }}):{% if comment is defined %}  # {{ comment }}{% endif %}
+{%- if not fields %}
+    pass
+{%- endif %}
+{%- for field in fields -%}
+    {%- if field.required %}
+    {{ field.name }}: {{ field.type_hint }}
+    {%- else %}
+    {{ field.name }}: {{ field.type_hint }} = {{ field.default }}
+    {%- endif %}
+    {%- if field.docstring %}
+    """
+    {{ field.docstring }}
+    """
+    {%- endif %}
+{%- endfor -%}
diff -pruN 0.26.4-3/tests/data/yaml/invalid_root_list.yaml 0.45.0-1/tests/data/yaml/invalid_root_list.yaml
--- 0.26.4-3/tests/data/yaml/invalid_root_list.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/yaml/invalid_root_list.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+- item1
+- item2
+- item3
diff -pruN 0.26.4-3/tests/data/yaml/pet.yaml 0.45.0-1/tests/data/yaml/pet.yaml
--- 0.26.4-3/tests/data/yaml/pet.yaml	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/data/yaml/pet.yaml	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,3 @@
+Pet:
+  name: cat
+  age: 3
\ No newline at end of file
diff -pruN 0.26.4-3/tests/main/__init__.py 0.45.0-1/tests/main/__init__.py
--- 0.26.4-3/tests/main/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/main/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1 @@
+"""Main integration tests package."""
diff -pruN 0.26.4-3/tests/main/conftest.py 0.45.0-1/tests/main/conftest.py
--- 0.26.4-3/tests/main/conftest.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/main/conftest.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,491 @@
+"""Shared fixtures and utilities for main integration tests."""
+
+from __future__ import annotations
+
+import importlib.util
+import inspect
+import shutil
+import sys
+import time
+from argparse import Namespace
+from collections.abc import Callable, Generator, Sequence
+from pathlib import Path
+from typing import Literal
+
+import black
+import pytest
+from packaging import version
+
+from datamodel_code_generator import DataModelType
+from datamodel_code_generator.__main__ import Exit, main
+from datamodel_code_generator.util import PYDANTIC_V2
+from tests.conftest import (
+    AssertFileContent,
+    _validation_stats,
+    assert_directory_content,
+    assert_output,
+    freeze_time,
+    validate_generated_code,
+)
+
+InputFileTypeLiteral = Literal["auto", "openapi", "jsonschema", "json", "yaml", "dict", "csv", "graphql"]
+CopyFilesMapping = Sequence[tuple[Path, Path]]
+
+MSGSPEC_LEGACY_BLACK_SKIP = pytest.mark.skipif(
+    sys.version_info[:2] == (3, 12) and version.parse(black.__version__) < version.parse("24.0.0"),
+    reason="msgspec.Struct formatting differs with python3.12 + black < 24",
+)
+
+LEGACY_BLACK_SKIP = pytest.mark.skipif(
+    version.parse(black.__version__) < version.parse("24.0.0"),
+    reason="Type annotation formatting differs with black < 24",
+)
+
+from datamodel_code_generator.format import PythonVersion, is_supported_in_black  # noqa: E402
+
+BLACK_PY313_SKIP = pytest.mark.skipif(
+    not is_supported_in_black(PythonVersion.PY_313),
+    reason=f"Installed black ({black.__version__}) doesn't support Python 3.13",
+)
+
+BLACK_PY314_SKIP = pytest.mark.skipif(
+    not is_supported_in_black(PythonVersion.PY_314),
+    reason=f"Installed black ({black.__version__}) doesn't support Python 3.14",
+)
+
+DATA_PATH: Path = Path(__file__).parent.parent / "data"
+EXPECTED_MAIN_PATH: Path = DATA_PATH / "expected" / "main"
+
+PYTHON_DATA_PATH: Path = DATA_PATH / "python"
+OPEN_API_DATA_PATH: Path = DATA_PATH / "openapi"
+JSON_SCHEMA_DATA_PATH: Path = DATA_PATH / "jsonschema"
+GRAPHQL_DATA_PATH: Path = DATA_PATH / "graphql"
+JSON_DATA_PATH: Path = DATA_PATH / "json"
+CSV_DATA_PATH: Path = DATA_PATH / "csv"
+YAML_DATA_PATH: Path = DATA_PATH / "yaml"
+ALIASES_DATA_PATH: Path = DATA_PATH / "aliases"
+
+EXPECTED_OPENAPI_PATH: Path = EXPECTED_MAIN_PATH / "openapi"
+EXPECTED_JSON_SCHEMA_PATH: Path = EXPECTED_MAIN_PATH / "jsonschema"
+EXPECTED_GRAPHQL_PATH: Path = EXPECTED_MAIN_PATH / "graphql"
+EXPECTED_JSON_PATH: Path = EXPECTED_MAIN_PATH / "json"
+EXPECTED_CSV_PATH: Path = EXPECTED_MAIN_PATH / "csv"
+
+TIMESTAMP = "1985-10-26T01:21:00-07:00"
+DEFAULT_FREEZE_TIME = "2019-07-26"
+
+
+@pytest.fixture(autouse=True)
+def reset_namespace(monkeypatch: pytest.MonkeyPatch) -> None:
+    """Reset argument namespace before each test."""
+    namespace_ = Namespace(no_color=False)
+    monkeypatch.setattr("datamodel_code_generator.__main__.namespace", namespace_)
+    monkeypatch.setattr("datamodel_code_generator.arguments.namespace", namespace_)
+
+
+@pytest.fixture(autouse=True)
+def auto_freeze_time() -> Generator[None, None, None]:
+    """Auto-freeze time for all tests in main/ directory."""
+    with freeze_time(DEFAULT_FREEZE_TIME):
+        yield
+
+
+@pytest.fixture
+def output_file(tmp_path: Path) -> Path:
+    """Return standard output file path."""
+    return tmp_path / "output.py"
+
+
+@pytest.fixture
+def output_dir(tmp_path: Path) -> Path:
+    """Return standard output directory path."""
+    return tmp_path / "model"
+
+
+def _copy_files(copy_files: CopyFilesMapping | None) -> None:
+    """Copy files from source to destination paths."""
+    if copy_files is not None:
+        for src, dst in copy_files:
+            shutil.copy(src, dst)
+
+
+def _assert_exit_code(return_code: Exit, expected_exit: Exit, context: str) -> None:
+    """Assert exit code matches expected value."""
+    if return_code != expected_exit:  # pragma: no cover
+        pytest.fail(f"Expected exit code {expected_exit!r}, got {return_code!r}\n{context}")
+
+
+def _extend_args(
+    args: list[str],
+    *,
+    input_path: Path | None = None,
+    output_path: Path | None = None,
+    input_file_type: InputFileTypeLiteral | None = None,
+    extra_args: Sequence[str] | None = None,
+) -> None:
+    """Extend args with optional input_path, output_path, input_file_type and extra_args."""
+    if input_path is not None:
+        args.extend(["--input", str(input_path)])
+    if output_path is not None:
+        args.extend(["--output", str(output_path)])
+    if input_file_type is not None:
+        args.extend(["--input-file-type", input_file_type])
+    if extra_args is not None:
+        args.extend(extra_args)
+
+
+def _run_main(
+    input_path: Path,
+    output_path: Path,
+    input_file_type: InputFileTypeLiteral | None = None,
+    *,
+    extra_args: Sequence[str] | None = None,
+    copy_files: CopyFilesMapping | None = None,
+) -> Exit:
+    """Execute main() with standard arguments (internal use)."""
+    _copy_files(copy_files)
+    args: list[str] = []
+    _extend_args(
+        args, input_path=input_path, output_path=output_path, input_file_type=input_file_type, extra_args=extra_args
+    )
+    return main(args)
+
+
+def _run_main_url(
+    url: str,
+    output_path: Path,
+    input_file_type: InputFileTypeLiteral | None = None,
+    *,
+    extra_args: Sequence[str] | None = None,
+) -> Exit:
+    """Execute main() with URL input (internal use)."""
+    args = ["--url", url]
+    _extend_args(args, output_path=output_path, input_file_type=input_file_type, extra_args=extra_args)
+    return main(args)
+
+
+def run_main_with_args(
+    args: Sequence[str],
+    *,
+    expected_exit: Exit = Exit.OK,
+    capsys: pytest.CaptureFixture[str] | None = None,
+    expected_stdout_path: Path | None = None,
+) -> Exit:
+    """Execute main() with custom arguments.
+
+    Args:
+        args: Command line arguments to pass to main()
+        expected_exit: Expected exit code (default: Exit.OK)
+        capsys: pytest capsys fixture for capturing output (required if expected_stdout_path is set)
+        expected_stdout_path: Path to file with expected stdout content
+
+    Returns:
+        Exit code from main()
+    """
+    __tracebackhide__ = True
+    return_code = main(list(args))
+    _assert_exit_code(return_code, expected_exit, f"Args: {args}")
+    if expected_stdout_path is not None:  # pragma: no branch
+        if capsys is None:  # pragma: no cover
+            pytest.fail("capsys is required when expected_stdout_path is set")
+        captured = capsys.readouterr()
+        assert_output(captured.out, expected_stdout_path)
+    return return_code
+
+
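+# A minimal usage sketch (hypothetical test; the schema and expected-stdout
+# paths are illustrative, not files guaranteed to exist in the suite):
+def _example_run_main_with_args(capsys: pytest.CaptureFixture[str], expected_stdout: Path) -> None:
+    run_main_with_args(
+        ["--input", str(OPEN_API_DATA_PATH / "api.yaml"), "--input-file-type", "openapi"],
+        capsys=capsys,
+        expected_stdout_path=expected_stdout,
+    )
+
+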
+def run_main_and_assert(  # noqa: PLR0912
+    *,
+    input_path: Path | None = None,
+    output_path: Path | None = None,
+    input_file_type: InputFileTypeLiteral | None = None,
+    extra_args: Sequence[str] | None = None,
+    expected_exit: Exit = Exit.OK,
+    # Output verification options (use one)
+    assert_func: AssertFileContent | None = None,
+    expected_file: str | Path | None = None,
+    expected_output: str | None = None,
+    expected_directory: Path | None = None,
+    output_to_expected: Sequence[tuple[str, str | Path]] | None = None,
+    file_should_not_exist: Path | None = None,
+    # Verification options
+    ignore_whitespace: bool = False,
+    transform: Callable[[str], str] | None = None,
+    # Capture options
+    capsys: pytest.CaptureFixture[str] | None = None,
+    expected_stdout_path: Path | None = None,
+    expected_stderr: str | None = None,
+    expected_stderr_contains: str | None = None,
+    assert_no_stderr: bool = False,
+    # Other options
+    copy_files: CopyFilesMapping | None = None,
+    # stdin options
+    stdin_path: Path | None = None,
+    monkeypatch: pytest.MonkeyPatch | None = None,
+    # Code validation options
+    skip_code_validation: bool = False,
+) -> None:
+    """Execute main() and assert output.
+
+    This is the unified helper function for testing file-based input.
+
+    Input options:
+        input_path: Path to input schema file
+        stdin_path: Path to file that will be used as stdin (requires monkeypatch)
+        monkeypatch: pytest monkeypatch fixture for mocking stdin
+
+    Output options:
+        output_path: Path to output file/directory (None for stdout-only tests)
+
+    Common options:
+        input_file_type: Type of input file (openapi, jsonschema, graphql, etc.)
+        extra_args: Additional CLI arguments
+        expected_exit: Expected exit code (default: Exit.OK)
+        copy_files: Files to copy before running
+
+    Output verification (use one):
+        assert_func + expected_file: Compare with expected file using assert function
+        expected_output: Compare with string directly
+        expected_directory: Compare entire directory
+        output_to_expected: Compare multiple files
+        file_should_not_exist: Assert a file does NOT exist
+
+    Verification modifiers:
+        ignore_whitespace: Ignore whitespace when comparing (for expected_output)
+        transform: Transform output before comparison
+
+    Capture verification:
+        capsys: pytest capsys fixture
+        expected_stdout_path: Compare stdout with file
+        expected_stderr: Assert exact stderr match
+        expected_stderr_contains: Assert stderr contains string
+        assert_no_stderr: Assert stderr is empty
+    """
+    __tracebackhide__ = True
+
+    # Handle stdin input
+    if stdin_path is not None:
+        if monkeypatch is None:  # pragma: no cover
+            pytest.fail("monkeypatch is required when using stdin_path")
+        monkeypatch.setattr("sys.stdin", stdin_path.open(encoding="utf-8"))
+        args: list[str] = []
+        _extend_args(args, output_path=output_path, input_file_type=input_file_type, extra_args=extra_args)
+        return_code = main(args)
+    # Handle stdout-only output (no output_path)
+    elif output_path is None:
+        if input_path is None:  # pragma: no cover
+            pytest.fail("input_path is required when output_path is None")
+        args = []
+        _extend_args(args, input_path=input_path, input_file_type=input_file_type, extra_args=extra_args)
+        return_code = main(args)
+    # Standard file input
+    else:
+        if input_path is None:  # pragma: no cover
+            pytest.fail("input_path is required")
+        return_code = _run_main(input_path, output_path, input_file_type, extra_args=extra_args, copy_files=copy_files)
+
+    _assert_exit_code(return_code, expected_exit, f"Input: {input_path}")
+
+    # Handle capture assertions
+    if capsys is not None and (
+        expected_stdout_path is not None
+        or expected_stderr is not None
+        or expected_stderr_contains is not None
+        or assert_no_stderr
+    ):
+        captured = capsys.readouterr()
+        if expected_stdout_path is not None:
+            assert_output(captured.out, expected_stdout_path)
+        if expected_stderr is not None and captured.err != expected_stderr:  # pragma: no cover
+            pytest.fail(f"Expected stderr:\n{expected_stderr}\n\nActual stderr:\n{captured.err}")
+        if expected_stderr_contains is not None and expected_stderr_contains not in captured.err:  # pragma: no cover
+            pytest.fail(f"Expected stderr to contain: {expected_stderr_contains!r}\n\nActual stderr:\n{captured.err}")
+        if assert_no_stderr and captured.err:  # pragma: no cover
+            pytest.fail(f"Expected no stderr, but got:\n{captured.err}")
+
+    # Skip output verification if expected_exit is not OK
+    if expected_exit != Exit.OK:
+        return
+
+    # Output verification
+    if expected_directory is not None:
+        if output_path is None:  # pragma: no cover
+            pytest.fail("output_path is required when using expected_directory")
+        assert_directory_content(output_path, expected_directory)
+    elif output_to_expected is not None:
+        if output_path is None:  # pragma: no cover
+            pytest.fail("output_path is required when using output_to_expected")
+        if assert_func is None:  # pragma: no cover
+            pytest.fail("assert_func is required when using output_to_expected")
+        for output_relative, exp_file in output_to_expected:
+            assert_func(output_path / output_relative, exp_file)
+    elif expected_output is not None:
+        if output_path is None:  # pragma: no cover
+            pytest.fail("output_path is required when using expected_output")
+        actual_output = output_path.read_text(encoding="utf-8")
+        if ignore_whitespace:
+            if "".join(actual_output.split()) != "".join(expected_output.split()):  # pragma: no cover
+                pytest.fail(
+                    f"Output mismatch (ignoring whitespace)\nExpected:\n{expected_output}\n\nActual:\n{actual_output}"
+                )
+        elif actual_output != expected_output:  # pragma: no cover
+            pytest.fail(f"Output mismatch\nExpected:\n{expected_output}\n\nActual:\n{actual_output}")
+    elif file_should_not_exist is not None:
+        if file_should_not_exist.exists():  # pragma: no cover
+            pytest.fail(f"File should not exist: {file_should_not_exist}")
+    elif assert_func is not None:
+        if output_path is None:  # pragma: no cover
+            pytest.fail("output_path is required when using assert_func")
+        if expected_file is None:  # pragma: no branch
+            frame = inspect.currentframe()
+            assert frame is not None
+            assert frame.f_back is not None
+            func_name = frame.f_back.f_code.co_name
+            del frame
+            for prefix in ("test_main_", "test_"):  # pragma: no branch
+                if func_name.startswith(prefix):
+                    func_name = func_name[len(prefix) :]
+                    break
+            expected_file = f"{func_name}.py"
+        assert_func(output_path, expected_file, transform=transform)
+
+    if output_path is not None and not skip_code_validation:
+        _validate_output_files(output_path, extra_args)
+
+
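+# A typical invocation, shown as a sketch (hypothetical test; "person.json"
+# and the golden file are illustrative). Exactly one verification option
+# should be supplied -- here expected_output; other tests use
+# assert_func/expected_file or expected_directory instead.
+def _example_run_main_and_assert(output_file: Path, golden_file: Path) -> None:
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "person.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        expected_output=golden_file.read_text(encoding="utf-8"),
+    )
+
+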
+def _get_argument_value(arguments: Sequence[str] | None, argument_name: str) -> str | None:
+    """Extract argument value from arguments."""
+    if arguments is None:
+        return None
+    argument_list = list(arguments)
+    for index, argument in enumerate(argument_list):
+        if argument == argument_name and index + 1 < len(argument_list):
+            return argument_list[index + 1]
+    return None
+
+
+def _parse_target_version(extra_arguments: Sequence[str] | None) -> tuple[int, int] | None:
+    """Parse target Python version from arguments."""
+    if (target_version := _get_argument_value(extra_arguments, "--target-python-version")) is None:
+        return None
+    try:
+        return tuple(int(part) for part in target_version.split("."))  # type: ignore[return-value]
+    except ValueError:  # pragma: no cover
+        return None
+
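+# Behavior sketch for the two helpers above (argument lists are illustrative):
+#     _get_argument_value(["--output-model-type", "msgspec.Struct"], "--output-model-type")
+#         -> "msgspec.Struct"
+#     _parse_target_version(["--target-python-version", "3.11"]) -> (3, 11)
+#     _parse_target_version(["--use-annotated"]) -> None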
+
+def _should_skip_compile(extra_arguments: Sequence[str] | None) -> bool:
+    """Check if compile should be skipped when target version > runtime version."""
+    if (target_version := _parse_target_version(extra_arguments)) is None:
+        return False
+    return target_version > sys.version_info[:2]
+
+
+def _should_skip_exec(extra_arguments: Sequence[str] | None) -> bool:
+    """Check if exec should be skipped based on model type, pydantic version, and Python version."""
+    output_model_type = _get_argument_value(extra_arguments, "--output-model-type")
+    is_pydantic_v1 = output_model_type is None or output_model_type == DataModelType.PydanticBaseModel.value
+    if (is_pydantic_v1 and PYDANTIC_V2) or (
+        output_model_type == DataModelType.PydanticV2BaseModel.value and not PYDANTIC_V2
+    ):
+        return True
+    if (target_version := _parse_target_version(extra_arguments)) is None:
+        return True
+    if target_version != sys.version_info[:2]:
+        return True
+    return _get_argument_value(extra_arguments, "--base-class") is not None
+
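+# Decision sketch for _should_skip_exec (cases are illustrative): exec only
+# runs when the output model matches the installed pydantic major version,
+# --target-python-version is given and equals the running interpreter, and no
+# custom --base-class is supplied; every other combination returns True (skip).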
+
+def _validate_output_files(output_path: Path, extra_arguments: Sequence[str] | None = None) -> None:
+    """Validate generated Python files by compiling/executing them."""
+    if _should_skip_compile(extra_arguments):
+        return
+    should_exec = not _should_skip_exec(extra_arguments)
+    if output_path.is_file() and output_path.suffix == ".py":
+        validate_generated_code(output_path.read_text(encoding="utf-8"), str(output_path), do_exec=should_exec)
+    elif output_path.is_dir():  # pragma: no cover
+        for python_file in output_path.rglob("*.py"):
+            validate_generated_code(python_file.read_text(encoding="utf-8"), str(python_file), do_exec=False)
+        if should_exec:  # pragma: no cover
+            _import_package(output_path)
+
+
+def _import_package(output_path: Path) -> None:  # pragma: no cover  # noqa: PLR0912
+    """Import generated packages to validate they can be loaded."""
+    if (output_path / "__init__.py").exists():
+        packages = [(output_path.parent, output_path.name)]
+    else:
+        packages = [
+            (output_path, directory.name)
+            for directory in output_path.iterdir()
+            if directory.is_dir() and (directory / "__init__.py").exists()
+        ]
+    if not packages:
+        return
+
+    imported_modules: list[str] = []
+    start_time = time.perf_counter()
+    try:
+        for parent_directory, package_name in packages:
+            package_path = parent_directory / package_name
+            sys.path.insert(0, str(parent_directory))
+            spec = importlib.util.spec_from_file_location(
+                package_name, package_path / "__init__.py", submodule_search_locations=[str(package_path)]
+            )
+            if spec is None or spec.loader is None:
+                continue
+            module = importlib.util.module_from_spec(spec)
+            sys.modules[package_name] = module
+            imported_modules.append(package_name)
+            spec.loader.exec_module(module)
+
+            for python_file in package_path.rglob("*.py"):
+                if python_file.name == "__init__.py":
+                    continue
+                relative_path = python_file.relative_to(package_path)
+                module_name = f"{package_name}.{'.'.join(relative_path.with_suffix('').parts)}"
+                submodule_spec = importlib.util.spec_from_file_location(module_name, python_file)
+                if submodule_spec is None or submodule_spec.loader is None:
+                    continue
+                submodule = importlib.util.module_from_spec(submodule_spec)
+                sys.modules[module_name] = submodule
+                imported_modules.append(module_name)
+                submodule_spec.loader.exec_module(submodule)
+        _validation_stats.record_exec(time.perf_counter() - start_time)
+    except Exception as exception:
+        _validation_stats.record_error(str(output_path), f"{type(exception).__name__}: {exception}")
+        raise
+    finally:
+        for parent_directory, _ in packages:
+            if str(parent_directory) in sys.path:
+                sys.path.remove(str(parent_directory))
+        for module_name in imported_modules:
+            sys.modules.pop(module_name, None)
+
+
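+# The core importlib idiom used above, reduced to a sketch for one package
+# (``pkg_dir`` is a hypothetical path to a generated package directory):
+def _example_import_generated_package(pkg_dir: Path) -> None:
+    spec = importlib.util.spec_from_file_location(
+        pkg_dir.name, pkg_dir / "__init__.py", submodule_search_locations=[str(pkg_dir)]
+    )
+    assert spec is not None and spec.loader is not None
+    module = importlib.util.module_from_spec(spec)
+    sys.modules[pkg_dir.name] = module  # register before exec so submodule imports resolve
+    spec.loader.exec_module(module)
+
+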
+def run_main_url_and_assert(
+    *,
+    url: str,
+    output_path: Path,
+    input_file_type: InputFileTypeLiteral | None,
+    assert_func: AssertFileContent,
+    expected_file: str | Path,
+    extra_args: Sequence[str] | None = None,
+    transform: Callable[[str], str] | None = None,
+) -> None:
+    """Execute main() with URL input and assert output.
+
+    Args:
+        url: URL to fetch schema from
+        output_path: Path to output file
+        input_file_type: Type of input file (openapi, jsonschema, graphql, etc.)
+        assert_func: The assert_file_content function to use for verification
+        expected_file: Expected output filename
+        extra_args: Additional CLI arguments
+        transform: Optional function to transform output before comparison
+    """
+    __tracebackhide__ = True
+    return_code = _run_main_url(url, output_path, input_file_type, extra_args=extra_args)
+    _assert_exit_code(return_code, Exit.OK, f"URL: {url}")
+    assert_func(output_path, expected_file, transform=transform)
+
+    _validate_output_files(output_path, extra_args)
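+
+
+# Usage sketch (hypothetical; the URL and golden file are illustrative):
+#
+#     run_main_url_and_assert(
+#         url="https://example.com/openapi.yaml",
+#         output_path=output_file,
+#         input_file_type="openapi",
+#         assert_func=assert_file_content,  # created per test package
+#         expected_file="general.py",
+#     )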
diff -pruN 0.26.4-3/tests/main/graphql/__init__.py 0.45.0-1/tests/main/graphql/__init__.py
--- 0.26.4-3/tests/main/graphql/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/main/graphql/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1 @@
+"""GraphQL integration tests package."""
diff -pruN 0.26.4-3/tests/main/graphql/conftest.py 0.45.0-1/tests/main/graphql/conftest.py
--- 0.26.4-3/tests/main/graphql/conftest.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/main/graphql/conftest.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,8 @@
+"""Shared fixtures for GraphQL tests."""
+
+from __future__ import annotations
+
+from tests.conftest import create_assert_file_content
+from tests.main.conftest import EXPECTED_GRAPHQL_PATH
+
+assert_file_content = create_assert_file_content(EXPECTED_GRAPHQL_PATH)
diff -pruN 0.26.4-3/tests/main/graphql/test_annotated.py 0.45.0-1/tests/main/graphql/test_annotated.py
--- 0.26.4-3/tests/main/graphql/test_annotated.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/main/graphql/test_annotated.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,121 @@
+"""Tests for GraphQL annotated types generation."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import pytest
+
+from tests.main.conftest import GRAPHQL_DATA_PATH, run_main_and_assert
+from tests.main.graphql.conftest import assert_file_content
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+
+def test_annotated(output_file: Path) -> None:
+    """Test GraphQL code generation with annotated types."""
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "annotated.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        extra_args=["--output-model-type", "pydantic_v2.BaseModel", "--use-annotated"],
+    )
+
+
+def test_annotated_use_standard_collections(output_file: Path) -> None:
+    """Test GraphQL annotated types with standard collections."""
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "annotated.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--use-annotated",
+            "--use-standard-collections",
+        ],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--use-annotated", "--use-union-operator"],
+    input_schema="graphql/annotated.graphql",
+    cli_args=[
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+        "--use-annotated",
+        "--use-standard-collections",
+        "--use-union-operator",
+    ],
+    golden_output="graphql/annotated_use_standard_collections_use_union_operator.py",
+    related_options=["--use-standard-collections"],
+)
+def test_annotated_use_standard_collections_use_union_operator(output_file: Path) -> None:
+    """Test GraphQL annotated types with standard collections and union operator."""
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "annotated.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--use-annotated",
+            "--use-standard-collections",
+            "--use-union-operator",
+        ],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--use-annotated", "--use-union-operator"],
+    input_schema="graphql/annotated.graphql",
+    cli_args=["--output-model-type", "pydantic_v2.BaseModel", "--use-annotated", "--use-union-operator"],
+    golden_output="graphql/annotated_use_union_operator.py",
+)
+def test_annotated_use_union_operator(output_file: Path) -> None:
+    """Test GraphQL annotated types with union operator."""
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "annotated.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--use-annotated",
+            "--use-union-operator",
+        ],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--aliases", "--use-annotated"],
+    input_schema="graphql/field-aliases.graphql",
+    cli_args=[
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+        "--use-annotated",
+        "--aliases",
+        "graphql/field-aliases.json",
+    ],
+    golden_output="graphql/annotated_field_aliases.py",
+)
+def test_annotated_field_aliases(output_file: Path) -> None:
+    """Test GraphQL annotated types with field aliases."""
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "field-aliases.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--use-annotated",
+            "--aliases",
+            str(GRAPHQL_DATA_PATH / "field-aliases.json"),
+        ],
+    )
diff -pruN 0.26.4-3/tests/main/graphql/test_main_graphql.py 0.45.0-1/tests/main/graphql/test_main_graphql.py
--- 0.26.4-3/tests/main/graphql/test_main_graphql.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/main/graphql/test_main_graphql.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,577 @@
+"""Tests for GraphQL schema code generation."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import black
+import pytest
+
+from tests.main.conftest import GRAPHQL_DATA_PATH, run_main_and_assert
+from tests.main.graphql.conftest import assert_file_content
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "simple_star_wars.py",
+        ),
+        (
+            "dataclasses.dataclass",
+            "simple_star_wars_dataclass.py",
+        ),
+    ],
+)
+@pytest.mark.cli_doc(
+    options=["--output-model-type"],
+    input_schema="graphql/simple-star-wars.graphql",
+    cli_args=["--output-model-type", "pydantic.BaseModel"],
+    model_outputs={
+        "pydantic_v1": "graphql/simple_star_wars.py",
+        "dataclass": "graphql/simple_star_wars_dataclass.py",
+    },
+)
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_graphql_simple_star_wars(output_model: str, expected_output: str, output_file: Path) -> None:
+    """Generate models from GraphQL with different output model types.
+
+    This example demonstrates using `--output-model-type` with GraphQL schemas
+    to generate either Pydantic models or dataclasses.
+    """
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "simple-star-wars.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        expected_file=expected_output,
+        extra_args=["--output-model-type", output_model],
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_graphql_different_types_of_fields(output_file: Path) -> None:
+    """Test GraphQL code generation with different field types."""
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "different-types-of-fields.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        expected_file="different_types_of_fields.py",
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--use-default-kwarg"],
+    input_schema="graphql/annotated.graphql",
+    cli_args=["--use-default-kwarg"],
+    golden_output="graphql/annotated_use_default_kwarg.py",
+)
+def test_main_use_default_kwarg(output_file: Path) -> None:
+    """Use default= keyword argument instead of positional argument for fields with defaults.
+
+    The `--use-default-kwarg` flag generates Field() declarations using `default=`
+    as a keyword argument instead of a positional argument for fields that have
+    default values.
+    """
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "annotated.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        expected_file="annotated_use_default_kwarg.py",
+        extra_args=["--use-default-kwarg"],
+    )
+
+
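+# Illustrative difference in the generated Field declaration (a sketch, not
+# the golden file's exact content):
+#     without the flag:  name: Optional[str] = Field("anonymous")
+#     with the flag:     name: Optional[str] = Field(default="anonymous")
+
+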
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_graphql_custom_scalar_types(output_file: Path) -> None:
+    """Test GraphQL code generation with custom scalar types."""
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "custom-scalar-types.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        expected_file="custom_scalar_types.py",
+        extra_args=["--extra-template-data", str(GRAPHQL_DATA_PATH / "custom-scalar-types.json")],
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+@pytest.mark.cli_doc(
+    options=["--aliases"],
+    input_schema="graphql/field-aliases.graphql",
+    cli_args=["--aliases", "graphql/field-aliases.json"],
+    golden_output="graphql/field_aliases.py",
+)
+def test_main_graphql_field_aliases(output_file: Path) -> None:
+    """Test GraphQL code generation with field aliases."""
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "field-aliases.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        expected_file="field_aliases.py",
+        extra_args=["--aliases", str(GRAPHQL_DATA_PATH / "field-aliases.json")],
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_graphql_casing(output_file: Path) -> None:
+    """Test GraphQL code generation with casing transformations."""
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "casing.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        expected_file="casing.py",
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_graphql_enums(output_file: Path) -> None:
+    """Test GraphQL code generation with enums."""
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "enums.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        expected_file="enums.py",
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "22",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_graphql_specialized_enums(output_file: Path) -> None:
+    """Test GraphQL code generation with specialized enums for Python 3.11+."""
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "enums.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        expected_file="enums_specialized.py",
+        extra_args=["--target-python-version", "3.11"],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--enum-field-as-literal"],
+    input_schema="graphql/enums.graphql",
+    cli_args=["--enum-field-as-literal", "all"],
+    golden_output="graphql/enum_literals_all.py",
+    comparison_output="graphql/enums.py",
+)
+def test_main_graphql_enums_as_literals_all(output_file: Path) -> None:
+    """Convert all enum fields to Literal types instead of Enum classes.
+
+    The `--enum-field-as-literal all` flag converts all enum types to Literal
+    type annotations. This is useful when you want string literal types instead
+    of Enum classes for all enumerations.
+    """
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "enums.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        expected_file="enum_literals_all.py",
+        extra_args=["--enum-field-as-literal", "all"],
+    )
+
+
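+# Sketch of the effect (names are illustrative): with ``all``, every enum-typed
+# field becomes a Literal annotation, e.g.
+#     episode: Literal["NEWHOPE", "EMPIRE", "JEDI"]
+# instead of referencing a generated Enum class; ``one`` (next test) converts
+# only single-member enums.
+
+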
+@pytest.mark.cli_doc(
+    options=["--enum-field-as-literal"],
+    input_schema="graphql/enums.graphql",
+    cli_args=["--enum-field-as-literal", "one"],
+    golden_output="graphql/enum_literals_one.py",
+)
+def test_main_graphql_enums_as_literals_one(output_file: Path) -> None:
+    """Convert single-member enums to Literal types.
+
+    The `--enum-field-as-literal one` flag only converts enums with a single
+    member to Literal types, keeping multi-member enums as Enum classes.
+    """
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "enums.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        expected_file="enum_literals_one.py",
+        extra_args=["--enum-field-as-literal", "one"],
+    )
+
+
+def test_main_graphql_enums_to_typed_dict(output_file: Path) -> None:
+    """Test GraphQL code generation paired with typing.TypedDict output which forces enums as literals."""
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "enums.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        expected_file="enums_typed_dict.py",
+        extra_args=["--output-model-type", "typing.TypedDict"],
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "22",
+    reason="Installed black doesn't support the old style",
+)
+@pytest.mark.cli_doc(
+    options=["--no-use-specialized-enum"],
+    input_schema="graphql/enums.graphql",
+    cli_args=["--target-python-version", "3.11", "--no-use-specialized-enum"],
+    golden_output="graphql/enums_no_specialized.py",
+    related_options=["--use-specialized-enum", "--target-python-version"],
+)
+def test_main_graphql_specialized_enums_disabled(output_file: Path) -> None:
+    """Disable specialized Enum classes for Python 3.11+ code generation.
+
+    The `--no-use-specialized-enum` flag prevents the generator from using
+    specialized Enum classes (StrEnum, IntEnum) when generating code for
+    Python 3.11+, falling back to standard Enum classes instead.
+    """
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "enums.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        expected_file="enums_no_specialized.py",
+        extra_args=["--target-python-version", "3.11", "--no-use-specialized-enum"],
+    )
+
+
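+# Illustrative contrast (a sketch, not the golden files): targeting 3.11
+# normally yields specialized bases such as
+#     class Color(StrEnum): ...
+# while --no-use-specialized-enum keeps the generic form
+#     class Color(Enum): ...
+
+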
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+@pytest.mark.cli_doc(
+    options=["--use-subclass-enum"],
+    input_schema="graphql/enums.graphql",
+    cli_args=["--use-subclass-enum"],
+    golden_output="graphql/enums_using_subclass.py",
+)
+def test_main_graphql_enums_subclass(output_file: Path) -> None:
+    """Generate typed Enum subclasses for enums with specific field types.
+
+    The `--use-subclass-enum` flag generates Enum classes as subclasses of the
+    appropriate field type (int, float, bytes, str) when an enum has a specific
+    type, providing better type safety and IDE support.
+    """
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "enums.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        expected_file="enums_using_subclass.py",
+        extra_args=["--use-subclass-enum"],
+    )
+
+
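+# Illustrative output (sketch): an enum whose members are all ints becomes
+#     class Priority(int, Enum):
+#         low = 1
+#         high = 2
+# so members compare and serialize as plain ints.
+
+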
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_graphql_union(output_file: Path) -> None:
+    """Test GraphQL code generation with union types."""
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "union.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        expected_file="union.py",
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+@pytest.mark.cli_doc(
+    options=["--additional-imports"],
+    input_schema="graphql/additional-imports.graphql",
+    cli_args=["--additional-imports", "datetime.datetime,datetime.date,mymodule.myclass.MyCustomPythonClass"],
+    golden_output="graphql/additional_imports.py",
+)
+def test_main_graphql_additional_imports(output_file: Path) -> None:
+    """Add custom imports to generated output files.
+
+    The `--additional-imports` flag allows you to specify custom imports as a
+    comma-delimited list that will be added to the generated output file. This
+    is useful when using custom types defined in external modules (e.g.,
+    "datetime.datetime,datetime.date,mymodule.myclass.MyCustomPythonClass").
+    """
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "additional-imports.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        expected_file="additional_imports.py",
+        extra_args=[
+            "--extra-template-data",
+            str(GRAPHQL_DATA_PATH / "additional-imports-types.json"),
+            "--additional-imports",
+            "datetime.datetime,datetime.date,mymodule.myclass.MyCustomPythonClass",
+        ],
+    )
+
+
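+# Each comma-separated dotted path becomes an import at the top of the
+# generated module; for the arguments above that is (sketch):
+#     from datetime import date, datetime
+#     from mymodule.myclass import MyCustomPythonClass
+
+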
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+@pytest.mark.cli_doc(
+    options=["--custom-formatters"],
+    input_schema="graphql/custom-scalar-types.graphql",
+    cli_args=["--custom-formatters", "tests.data.python.custom_formatters.add_comment"],
+    golden_output="graphql/custom_formatters.py",
+)
+def test_main_graphql_custom_formatters(output_file: Path) -> None:
+    """Apply custom Python code formatters to generated output.
+
+    The `--custom-formatters` flag allows you to specify custom Python functions
+    that will be applied to format the generated code. The formatter is specified
+    as a module path (e.g., "mymodule.formatter_function"). This is useful for
+    adding custom comments, modifying code structure, or applying project-specific
+    formatting rules beyond what black/isort provide.
+    """
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "custom-scalar-types.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        expected_file="custom_formatters.py",
+        extra_args=["--custom-formatters", "tests.data.python.custom_formatters.add_comment"],
+    )
+
+
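+# A formatter module exposes a formatter class that rewrites the generated
+# source. Sketched from the project's custom-formatter convention (the base
+# class name should be checked against the installed version):
+#
+#     from datamodel_code_generator.format import CustomCodeFormatter
+#
+#     class CodeFormatter(CustomCodeFormatter):
+#         def apply(self, code: str) -> str:
+#             return f"# generated file -- do not edit\n{code}"
+
+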
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_graphql_use_standard_collections(output_file: Path) -> None:
+    """Test GraphQL code generation with standard collections."""
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "use-standard-collections.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        expected_file="use_standard_collections.py",
+        extra_args=["--use-standard-collections"],
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_graphql_use_union_operator(output_file: Path) -> None:
+    """Test GraphQL code generation with union operator syntax."""
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "use-union-operator.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        expected_file="use_union_operator.py",
+        extra_args=["--use-union-operator"],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--extra-fields"],
+    input_schema="graphql/simple-star-wars.graphql",
+    cli_args=["--extra-fields", "allow"],
+    golden_output="graphql/simple_star_wars_extra_fields_allow.py",
+)
+def test_main_graphql_extra_fields_allow(output_file: Path) -> None:
+    """Configure how generated models handle extra fields not defined in schema.
+
+    The `--extra-fields` flag sets the generated models to allow, forbid, or
+    ignore extra fields. With `--extra-fields allow`, models will accept and
+    store fields not defined in the schema. Options: allow, ignore, forbid.
+    """
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "simple-star-wars.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        expected_file="simple_star_wars_extra_fields_allow.py",
+        extra_args=["--extra-fields", "allow"],
+    )
+
+
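+# Sketch of the generated configuration (not the golden file): pydantic v1
+# models gain
+#     class Config:
+#         extra = Extra.allow
+# while pydantic v2 models would use model_config = ConfigDict(extra="allow").
+
+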
+@pytest.mark.cli_doc(
+    options=["--use-type-alias"],
+    input_schema="graphql/type_alias.graphql",
+    cli_args=["--use-type-alias"],
+    golden_output="graphql/type_alias.py",
+    related_options=["--target-python-version"],
+)
+def test_main_graphql_type_alias(output_file: Path) -> None:
+    """Use TypeAlias instead of root models for type definitions (experimental).
+
+    The `--use-type-alias` flag generates TypeAlias declarations instead of
+    root model classes for certain type definitions. For Python 3.9-3.11, it
+    generates TypeAliasType, and for Python 3.12+, it uses the 'type' statement
+    syntax. This feature is experimental.
+    """
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "type_alias.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        expected_file="type_alias.py",
+        extra_args=["--use-type-alias"],
+    )
+
+
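+# Illustrative forms of the alias output (sketch):
+#     3.9-3.11:  MyId = TypeAliasType("MyId", str)
+#     3.12+:     type MyId = str
+
+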
+@pytest.mark.skipif(
+    int(black.__version__.split(".")[0]) < 23,
+    reason="Installed black doesn't support the new 'type' statement",
+)
+def test_main_graphql_type_alias_py312(output_file: Path) -> None:
+    """Test that type statement syntax is generated for GraphQL schemas with Python 3.12+ and Pydantic v2."""
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "type_alias.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        expected_file="type_alias_py312.py",
+        extra_args=[
+            "--use-type-alias",
+            "--target-python-version",
+            "3.12",
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+        ],
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+@pytest.mark.cli_doc(
+    options=["--dataclass-arguments"],
+    input_schema="graphql/simple-star-wars.graphql",
+    cli_args=[
+        "--output-model-type",
+        "dataclasses.dataclass",
+        "--dataclass-arguments",
+        '{"slots": true, "order": true}',
+    ],
+    golden_output="graphql/simple_star_wars_dataclass_arguments.py",
+    related_options=["--frozen-dataclasses", "--keyword-only"],
+)
+def test_main_graphql_dataclass_arguments(output_file: Path) -> None:
+    """Customize dataclass decorator arguments via JSON dictionary.
+
+    The `--dataclass-arguments` flag accepts custom dataclass arguments as a JSON
+    dictionary (e.g., '{"frozen": true, "kw_only": true, "slots": true, "order": true}').
+    This overrides individual flags like --frozen-dataclasses and provides fine-grained
+    control over dataclass generation.
+    """
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "simple-star-wars.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        expected_file="simple_star_wars_dataclass_arguments.py",
+        extra_args=[
+            "--output-model-type",
+            "dataclasses.dataclass",
+            "--dataclass-arguments",
+            '{"slots": true, "order": true}',
+        ],
+    )
+
+
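+# The JSON dictionary is forwarded to the @dataclass decorator, so the
+# arguments above yield (sketch):
+#     @dataclass(order=True, slots=True)
+#     class Character:
+#         ...
+
+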
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_graphql_dataclass_arguments_with_pydantic(output_file: Path) -> None:
+    """Test GraphQL code generation with dataclass arguments passed but using Pydantic model.
+
+    This verifies that dataclass_arguments is properly ignored for non-dataclass models.
+    """
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "simple-star-wars.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        expected_file="simple_star_wars.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic.BaseModel",
+            "--dataclass-arguments",
+            '{"slots": true, "order": true}',
+        ],
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+@pytest.mark.cli_doc(
+    options=["--keyword-only"],
+    input_schema="graphql/simple-star-wars.graphql",
+    cli_args=[
+        "--output-model-type",
+        "dataclasses.dataclass",
+        "--frozen-dataclasses",
+        "--keyword-only",
+        "--target-python-version",
+        "3.10",
+    ],
+    golden_output="graphql/simple_star_wars_dataclass_frozen_kw_only.py",
+    related_options=["--frozen-dataclasses", "--target-python-version", "--output-model-type"],
+)
+def test_main_graphql_dataclass_frozen_keyword_only(output_file: Path) -> None:
+    """Generate dataclasses with keyword-only fields (Python 3.10+).
+
+    The `--keyword-only` flag generates dataclasses where all fields must be
+    specified as keyword arguments (kw_only=True). This is only available for
+    Python 3.10+. When combined with `--frozen-dataclasses`, it creates immutable dataclasses
+    with keyword-only arguments, improving code clarity and preventing positional
+    argument errors.
+    """
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "simple-star-wars.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        expected_file="simple_star_wars_dataclass_frozen_kw_only.py",
+        extra_args=[
+            "--output-model-type",
+            "dataclasses.dataclass",
+            "--frozen",
+            "--keyword-only",
+            "--target-python-version",
+            "3.10",
+        ],
+    )
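+
+
+# Combined effect (sketch): every generated model is decorated as
+#     @dataclass(frozen=True, kw_only=True)
+# so instances are immutable and must be built with keyword arguments,
+# e.g. Character(name="Luke") rather than Character("Luke").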
diff -pruN 0.26.4-3/tests/main/jsonschema/__init__.py 0.45.0-1/tests/main/jsonschema/__init__.py
--- 0.26.4-3/tests/main/jsonschema/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/main/jsonschema/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1 @@
+"""JSON Schema integration tests package."""
diff -pruN 0.26.4-3/tests/main/jsonschema/conftest.py 0.45.0-1/tests/main/jsonschema/conftest.py
--- 0.26.4-3/tests/main/jsonschema/conftest.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/main/jsonschema/conftest.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,8 @@
+"""Shared fixtures for JSON Schema tests."""
+
+from __future__ import annotations
+
+from tests.conftest import create_assert_file_content
+from tests.main.conftest import EXPECTED_JSON_SCHEMA_PATH
+
+assert_file_content = create_assert_file_content(EXPECTED_JSON_SCHEMA_PATH)
diff -pruN 0.26.4-3/tests/main/jsonschema/test_main_jsonschema.py 0.45.0-1/tests/main/jsonschema/test_main_jsonschema.py
--- 0.26.4-3/tests/main/jsonschema/test_main_jsonschema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/main/jsonschema/test_main_jsonschema.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,4331 @@
+"""Tests for JSON Schema input file code generation."""
+
+from __future__ import annotations
+
+import json
+from collections import defaultdict
+from pathlib import Path
+from typing import TYPE_CHECKING
+from unittest.mock import call
+
+import black
+import pytest
+from packaging import version
+
+from datamodel_code_generator import (
+    MIN_VERSION,
+    DataModelType,
+    InputFileType,
+    PythonVersion,
+    PythonVersionMin,
+    chdir,
+    generate,
+)
+from datamodel_code_generator.__main__ import Exit, main
+from datamodel_code_generator.format import is_supported_in_black
+from tests.conftest import assert_directory_content, freeze_time
+from tests.main.conftest import (
+    ALIASES_DATA_PATH,
+    DATA_PATH,
+    JSON_SCHEMA_DATA_PATH,
+    LEGACY_BLACK_SKIP,
+    MSGSPEC_LEGACY_BLACK_SKIP,
+    TIMESTAMP,
+    run_main_and_assert,
+    run_main_url_and_assert,
+    run_main_with_args,
+)
+from tests.main.jsonschema.conftest import EXPECTED_JSON_SCHEMA_PATH, assert_file_content
+
+if TYPE_CHECKING:
+    from pytest_mock import MockerFixture
+
+FixtureRequest = pytest.FixtureRequest
+
+
+@pytest.mark.benchmark
+def test_main_inheritance_forward_ref(output_file: Path, tmp_path: Path) -> None:
+    """Test inheritance with forward references."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "inheritance_forward_ref.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        copy_files=[(DATA_PATH / "pyproject.toml", tmp_path / "pyproject.toml")],
+    )
+
+
+@pytest.mark.benchmark
+@pytest.mark.cli_doc(
+    options=["--keep-model-order"],
+    input_schema="jsonschema/inheritance_forward_ref.json",
+    cli_args=["--keep-model-order"],
+    golden_output="jsonschema/inheritance_forward_ref_keep_model_order.py",
+    related_options=["--collapse-root-models"],
+)
+def test_main_inheritance_forward_ref_keep_model_order(output_file: Path, tmp_path: Path) -> None:
+    """Keep model definition order as specified in schema.
+
+    The `--keep-model-order` flag preserves the original definition order from the schema
+    instead of reordering models based on dependencies. This is useful when the order
+    of model definitions matters for documentation or readability.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "inheritance_forward_ref.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=["--keep-model-order"],
+        copy_files=[(DATA_PATH / "pyproject.toml", tmp_path / "pyproject.toml")],
+    )
+
+
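+# Ordering sketch (schema names are illustrative): if "Pet" is defined before
+# "Owner" but references it, dependency ordering emits Owner first, while
+# --keep-model-order emits Pet first and relies on forward references (and
+# model rebuilds, where applicable) to resolve the dependency.
+
+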
+@pytest.mark.benchmark
+def test_main_type_alias_forward_ref_keep_model_order(output_file: Path) -> None:
+    """Test TypeAliasType with forward references keeping model order."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "type_alias_forward_ref.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=[
+            "--keep-model-order",
+            "--output-model-type",
+            "typing.TypedDict",
+            "--use-standard-collections",
+            "--use-union-operator",
+            "--use-type-alias",
+            "--target-python-version",
+            "3.10",
+        ],
+    )
+
+
+@pytest.mark.benchmark
+def test_main_type_alias_cycle_keep_model_order(output_file: Path) -> None:
+    """Test TypeAlias cycle ordering with keep_model_order."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "type_alias_cycle.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=[
+            "--keep-model-order",
+            "--output-model-type",
+            "typing.TypedDict",
+            "--use-standard-collections",
+            "--use-union-operator",
+            "--use-type-alias",
+            "--target-python-version",
+            "3.10",
+        ],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--disable-future-imports"],
+    input_schema="jsonschema/keep_model_order_field_references.json",
+    cli_args=["--disable-future-imports", "--target-python-version", "3.10"],
+    golden_output="main/jsonschema/keep_model_order_field_references.py",
+)
+@pytest.mark.benchmark
+def test_main_keep_model_order_field_references(output_file: Path) -> None:
+    """Prevent automatic addition of __future__ imports in generated code.
+
+    The --disable-future-imports option stops the generator from adding
+    'from __future__ import annotations' to the output. This is useful when
+    you need compatibility with tools or environments that don't support
+    postponed evaluation of annotations (PEP 563).
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "keep_model_order_field_references.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=[
+            "--keep-model-order",
+            "--disable-future-imports",
+            "--target-python-version",
+            "3.10",
+        ],
+    )
+
+
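+# Effect sketch: by default the output begins with
+#     from __future__ import annotations
+# so annotations stay lazy strings (PEP 563); with --disable-future-imports
+# that line is omitted and annotations are evaluated eagerly at class
+# creation time.
+
+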
+@pytest.mark.parametrize(
+    ("target_python_version", "keep_model_order", "disable_future_imports"),
+    [
+        ("3.10", False, False),
+        ("3.10", False, True),
+        ("3.10", True, False),
+        ("3.10", True, True),
+        ("3.11", True, False),
+        ("3.11", True, True),
+        ("3.12", True, False),
+        ("3.12", True, True),
+        ("3.13", True, False),
+        ("3.13", True, True),
+        ("3.14", True, False),
+        ("3.14", True, True),
+    ],
+)
+def test_main_keep_model_order_matrix_keep_model_order_field_references(
+    output_file: Path,
+    target_python_version: str,
+    keep_model_order: bool,
+    disable_future_imports: bool,
+) -> None:
+    """E2E matrix for keep_model_order vs deferred annotations.
+
+    When deferred annotations are enabled (default), field references should not
+    force reordering (to avoid meaningless churn). When disabled, ordering must
+    satisfy runtime dependency requirements.
+    """
+    target_version = PythonVersion(target_python_version)
+    if not is_supported_in_black(target_version):
+        pytest.skip(f"Installed black ({black.__version__}) doesn't support Python {target_python_version}")
+
+    args = [
+        "--input",
+        str(JSON_SCHEMA_DATA_PATH / "keep_model_order_field_references.json"),
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+        "--target-python-version",
+        target_python_version,
+        "--formatters",
+        "isort",
+    ]
+    if keep_model_order:
+        args.append("--keep-model-order")
+    if disable_future_imports:
+        args.append("--disable-future-imports")
+
+    run_main_with_args(args)
+    code = output_file.read_text(encoding="utf-8")
+    compile(code, str(output_file), "exec")
+
+    if not keep_model_order:
+        return
+
+    metadata_index = code.index("class Metadata")
+    description_type_index = code.index("class DescriptionType")
+    use_deferred_annotations_for_target = target_version.has_native_deferred_annotations or not disable_future_imports
+    if use_deferred_annotations_for_target:
+        assert description_type_index < metadata_index
+    else:
+        assert metadata_index < description_type_index
+
+    # For targets without native deferred annotations, validate runtime safety
+    # under the current interpreter by executing the generated module.
+    if not target_version.has_native_deferred_annotations:
+        exec(compile(code, str(output_file), "exec"), {})
+
+
+@pytest.mark.cli_doc(
+    options=["--target-python-version"],
+    input_schema="jsonschema/pydantic_v2_model_rebuild_inheritance.json",
+    cli_args=["--output-model-type", "pydantic_v2.BaseModel", "--keep-model-order", "--target-python-version", "3.10"],
+    golden_output="jsonschema/pydantic_v2_model_rebuild_inheritance.py",
+)
+@pytest.mark.benchmark
+def test_main_pydantic_v2_model_rebuild_inheritance(output_file: Path) -> None:
+    """Target Python version for generated code syntax and imports.
+
+    The `--target-python-version` flag configures the code generation behavior.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "pydantic_v2_model_rebuild_inheritance.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--keep-model-order",
+            "--target-python-version",
+            "3.10",
+        ],
+    )
+
+
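+# Version-dependent rendering of the same optional field (sketch):
+#     targeting 3.9:   Optional[str]
+#     targeting 3.10+: str | None   (PEP 604)
+
+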
+@pytest.mark.skip(reason="pytest-xdist does not support the test")
+def test_main_without_arguments() -> None:
+    """Test main function without arguments raises SystemExit."""
+    with pytest.raises(SystemExit):
+        main()
+
+
+@pytest.mark.benchmark
+def test_main_autodetect(output_file: Path) -> None:
+    """Test automatic input file type detection."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "person.json",
+        output_path=output_file,
+        input_file_type="auto",
+        assert_func=assert_file_content,
+    )
+
+
+def test_main_autodetect_failed(tmp_path: Path) -> None:
+    """Test autodetect failure with invalid input."""
+    input_file: Path = tmp_path / "input.yaml"
+    output_file: Path = tmp_path / "output.py"
+    input_file.write_text(":", encoding="utf-8")
+    run_main_and_assert(
+        input_path=input_file,
+        output_path=output_file,
+        input_file_type="auto",
+        expected_exit=Exit.ERROR,
+    )
+
+
+def test_main_jsonschema(output_file: Path) -> None:
+    """Test JSON Schema file code generation."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "person.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="general.py",
+    )
+
+
+def test_main_jsonschema_dataclass_arguments_with_pydantic(output_file: Path) -> None:
+    """Test JSON Schema code generation with dataclass arguments passed but using Pydantic model.
+
+    This verifies that dataclass_arguments is properly ignored for non-dataclass models.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "person.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="general.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic.BaseModel",
+            "--dataclass-arguments",
+            '{"slots": true, "order": true}',
+        ],
+    )
+
+
+def test_main_jsonschema_dataclass_frozen_keyword_only(output_file: Path) -> None:
+    """Test JSON Schema code generation with frozen and keyword-only dataclass.
+
+    This tests the 'if existing:' False branch in _create_data_model when
+    no --dataclass-arguments is provided but --frozen-dataclasses and --keyword-only are set.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "person.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="general_dataclass_frozen_kw_only.py",
+        extra_args=[
+            "--output-model-type",
+            "dataclasses.dataclass",
+            "--frozen",
+            "--keyword-only",
+            "--target-python-version",
+            "3.10",
+        ],
+    )
+
+
+@pytest.mark.benchmark
+def test_main_jsonschema_nested_deep(tmp_path: Path) -> None:
+    """Test deeply nested JSON Schema generation."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "nested_person.json",
+        output_path=tmp_path,
+        output_to_expected=[
+            ("__init__.py", EXPECTED_JSON_SCHEMA_PATH / "nested_deep" / "__init__.py"),
+            ("nested/deep.py", EXPECTED_JSON_SCHEMA_PATH / "nested_deep" / "nested" / "deep.py"),
+            (
+                "empty_parent/nested/deep.py",
+                EXPECTED_JSON_SCHEMA_PATH / "nested_deep" / "empty_parent" / "nested" / "deep.py",
+            ),
+        ],
+        assert_func=assert_file_content,
+        input_file_type="jsonschema",
+    )
+
+
+def test_main_jsonschema_nested_skip(output_dir: Path) -> None:
+    """Test nested JSON Schema with skipped items."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "nested_skip.json",
+        output_path=output_dir,
+        expected_directory=EXPECTED_JSON_SCHEMA_PATH / "nested_skip",
+        input_file_type="jsonschema",
+    )
+
+
+@pytest.mark.benchmark
+def test_main_jsonschema_external_files(output_file: Path) -> None:
+    """Test JSON Schema with external file references."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "external_parent_root.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="external_files.py",
+    )
+
+
+@pytest.mark.benchmark
+def test_main_jsonschema_collapsed_external_references(tmp_path: Path) -> None:
+    """Test collapsed external references in JSON Schema."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "external_reference",
+        output_path=tmp_path,
+        output_to_expected=[
+            ("ref0.py", "external_ref0.py"),
+            ("other/ref2.py", EXPECTED_JSON_SCHEMA_PATH / "external_other_ref2.py"),
+        ],
+        assert_func=assert_file_content,
+        input_file_type="jsonschema",
+        extra_args=["--collapse-root-models"],
+    )
+
+
+@pytest.mark.benchmark
+def test_main_jsonschema_multiple_files(output_dir: Path) -> None:
+    """Test JSON Schema generation from multiple files."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "multiple_files",
+        output_path=output_dir,
+        expected_directory=EXPECTED_JSON_SCHEMA_PATH / "multiple_files",
+        input_file_type="jsonschema",
+    )
+
+
+@pytest.mark.benchmark
+def test_main_jsonschema_no_empty_collapsed_external_model(tmp_path: Path) -> None:
+    """Test no empty files with collapsed external models."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "external_collapse",
+        output_path=tmp_path,
+        file_should_not_exist=tmp_path / "child.py",
+        input_file_type="jsonschema",
+        extra_args=["--collapse-root-models"],
+    )
+    assert (tmp_path / "__init__.py").exists()
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "null_and_array.py",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "null_and_array_v2.py",
+        ),
+    ],
+)
+@pytest.mark.cli_doc(
+    options=["--output-model-type"],
+    input_schema="jsonschema/null_and_array.json",
+    cli_args=["--output-model-type", "pydantic.BaseModel"],
+    model_outputs={
+        "pydantic_v1": "main/jsonschema/null_and_array.py",
+        "pydantic_v2": "main/jsonschema/null_and_array_v2.py",
+    },
+    primary=True,
+)
+def test_main_null_and_array(output_model: str, expected_output: str, output_file: Path) -> None:
+    """Select the output model type (Pydantic v1/v2, dataclasses, TypedDict, msgspec).
+
+    The `--output-model-type` flag specifies which Python data model framework to use
+    for the generated code. Supported values include `pydantic.BaseModel`,
+    `pydantic_v2.BaseModel`, `dataclasses.dataclass`, `typing.TypedDict`, and
+    `msgspec.Struct`.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "null_and_array.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file=expected_output,
+        extra_args=["--output-model-type", output_model],
+    )
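+
+
+# Illustrative CLI usage (paths are hypothetical, flags as exercised above):
+#   datamodel-codegen --input schema.json --input-file-type jsonschema \
+#       --output-model-type msgspec.Struct --output model.py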
+
+
+@pytest.mark.cli_doc(
+    options=["--use-default"],
+    input_schema="jsonschema/use_default_with_const.json",
+    cli_args=["--output-model-type", "pydantic_v2.BaseModel", "--use-default"],
+    golden_output="jsonschema/use_default_with_const.py",
+)
+def test_use_default_pydantic_v2_with_json_schema_const(output_file: Path) -> None:
+    """Use default values from schema in generated models.
+
+    The `--use-default` flag applies the schema's `default` value to a field
+    even when the field is required, instead of leaving it without a default.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "use_default_with_const.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="use_default_with_const.py",
+        extra_args=["--output-model-type", "pydantic_v2.BaseModel", "--use-default"],
+    )
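+
+
+# Rough sketch of the effect (hypothetical schema, not the golden file): given a
+# required property {"status": {"type": "string", "default": "active"}},
+# `--use-default` emits `status: str = 'active'` rather than a required field
+# with no default value.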
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output", "option"),
+    [
+        (
+            "pydantic.BaseModel",
+            "complicated_enum_default_member.py",
+            "--set-default-enum-member",
+        ),
+        (
+            "dataclasses.dataclass",
+            "complicated_enum_default_member_dataclass.py",
+            "--set-default-enum-member",
+        ),
+        (
+            "dataclasses.dataclass",
+            "complicated_enum_default_member_dataclass.py",
+            None,
+        ),
+    ],
+)
+def test_main_complicated_enum_default_member(
+    output_model: str, expected_output: str, option: str | None, output_file: Path
+) -> None:
+    """Test complicated enum with default member."""
+    extra_args = [a for a in [option, "--output-model-type", output_model] if a]
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "complicated_enum.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file=expected_output,
+        extra_args=extra_args,
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--set-default-enum-member"],
+    input_schema="jsonschema/duplicate_enum.json",
+    cli_args=["--reuse-model", "--set-default-enum-member"],
+    golden_output="jsonschema/json_reuse_enum_default_member.py",
+)
+@pytest.mark.benchmark
+def test_main_json_reuse_enum_default_member(output_file: Path) -> None:
+    """Set the first enum member as the default value for enum fields.
+
+    The `--set-default-enum-member` flag configures the code generation behavior.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "duplicate_enum.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        extra_args=["--reuse-model", "--set-default-enum-member"],
+    )
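+
+
+# Illustrative sketch (hypothetical schema): for
+#   {"color": {"enum": ["red", "green"], "default": "red"}}
+# the flag emits `color: Color = Color.red` instead of `color: Color = 'red'`.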
+
+
+def test_main_invalid_model_name_failed(capsys: pytest.CaptureFixture[str], output_file: Path) -> None:
+    """Test invalid model name error handling."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "invalid_model_name.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        extra_args=["--class-name", "with"],
+        expected_exit=Exit.ERROR,
+        capsys=capsys,
+        expected_stderr_contains="title='with' is invalid class name. You have to set `--class-name` option",
+    )
+
+
+def test_main_invalid_model_name_converted(capsys: pytest.CaptureFixture[str], output_file: Path) -> None:
+    """Test invalid model name conversion error."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "invalid_model_name.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        expected_exit=Exit.ERROR,
+        capsys=capsys,
+        expected_stderr_contains="title='1Xyz' is invalid class name. You have to set `--class-name` option",
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--class-name"],
+    input_schema="jsonschema/invalid_model_name.json",
+    cli_args=["--class-name", "ValidModelName"],
+    golden_output="main/jsonschema/invalid_model_name.py",
+)
+def test_main_invalid_model_name(output_file: Path) -> None:
+    """Override the auto-generated class name with a custom name.
+
+    The `--class-name` option allows you to specify a custom class name for the
+    generated model. This is useful when the schema title is invalid as a Python
+    class name (e.g., starts with a number) or when you want to use a different
+    naming convention than what's in the schema.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "invalid_model_name.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        extra_args=["--class-name", "ValidModelName"],
+    )
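+
+
+# Illustrative CLI usage (mirrors the test above):
+#   datamodel-codegen --input invalid_model_name.json --class-name ValidModelName
+# Without the option, a schema title such as `1Xyz` aborts with the error
+# asserted in the tests above.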
+
+
+def test_main_jsonschema_reserved_field_names(output_file: Path) -> None:
+    """Test reserved names are safely suffixed and aliased."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "reserved_property.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="reserved_property.py",
+    )
+
+
+def test_main_jsonschema_with_local_anchor(output_file: Path) -> None:
+    """Test $id anchor lookup resolves without error and reuses definitions."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "with_anchor.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="with_anchor.py",
+    )
+
+
+def test_main_jsonschema_missing_anchor_reports_error(capsys: pytest.CaptureFixture[str], output_file: Path) -> None:
+    """Test missing $id anchor produces a clear error instead of KeyError trace."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "missing_anchor.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        expected_exit=Exit.ERROR,
+        capsys=capsys,
+        expected_stderr_contains="Unresolved $id reference '#address'",
+    )
+
+
+def test_main_root_id_jsonschema_with_local_file(mocker: MockerFixture, output_file: Path) -> None:
+    """Test root ID JSON Schema with local file reference."""
+    person_response = mocker.Mock()
+    person_response.text = (JSON_SCHEMA_DATA_PATH / "person.json").read_text()
+    httpx_get_mock = mocker.patch("httpx.get", side_effect=[person_response])
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "root_id.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="root_id.py",
+    )
+    httpx_get_mock.assert_not_called()
+
+
+def test_main_root_id_jsonschema_with_remote_file(mocker: MockerFixture, tmp_path: Path) -> None:
+    """Test root ID JSON Schema with remote file reference."""
+    person_response = mocker.Mock()
+    person_response.text = (JSON_SCHEMA_DATA_PATH / "person.json").read_text()
+    httpx_get_mock = mocker.patch("httpx.get", side_effect=[person_response])
+    input_file = tmp_path / "root_id.json"
+    output_file: Path = tmp_path / "output.py"
+    run_main_and_assert(
+        input_path=input_file,
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="root_id.py",
+        copy_files=[(JSON_SCHEMA_DATA_PATH / "root_id.json", input_file)],
+    )
+    httpx_get_mock.assert_has_calls([
+        call(
+            "https://example.com/person.json",
+            headers=None,
+            verify=True,
+            follow_redirects=True,
+            params=None,
+        ),
+    ])
+
+
+@pytest.mark.benchmark
+def test_main_root_id_jsonschema_self_refs_with_local_file(mocker: MockerFixture, output_file: Path) -> None:
+    """Test root ID JSON Schema self-references with local file."""
+    person_response = mocker.Mock()
+    person_response.text = (JSON_SCHEMA_DATA_PATH / "person.json").read_text()
+    httpx_get_mock = mocker.patch("httpx.get", side_effect=[person_response])
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "root_id_self_ref.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="root_id.py",
+        transform=lambda s: s.replace("filename:  root_id_self_ref.json", "filename:  root_id.json"),
+    )
+    httpx_get_mock.assert_not_called()
+
+
+@pytest.mark.benchmark
+def test_main_root_id_jsonschema_self_refs_with_remote_file(mocker: MockerFixture, tmp_path: Path) -> None:
+    """Test root ID JSON Schema self-references with remote file."""
+    person_response = mocker.Mock()
+    person_response.text = (JSON_SCHEMA_DATA_PATH / "person.json").read_text()
+    httpx_get_mock = mocker.patch("httpx.get", side_effect=[person_response])
+    input_file = tmp_path / "root_id_self_ref.json"
+    output_file: Path = tmp_path / "output.py"
+    run_main_and_assert(
+        input_path=input_file,
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="root_id.py",
+        transform=lambda s: s.replace("filename:  root_id_self_ref.json", "filename:  root_id.json"),
+        copy_files=[(JSON_SCHEMA_DATA_PATH / "root_id_self_ref.json", input_file)],
+    )
+    httpx_get_mock.assert_has_calls([
+        call(
+            "https://example.com/person.json",
+            headers=None,
+            verify=True,
+            follow_redirects=True,
+            params=None,
+        ),
+    ])
+
+
+def test_main_root_id_jsonschema_with_absolute_remote_file(mocker: MockerFixture, tmp_path: Path) -> None:
+    """Test root ID JSON Schema with absolute remote file URL."""
+    person_response = mocker.Mock()
+    person_response.text = (JSON_SCHEMA_DATA_PATH / "person.json").read_text()
+    httpx_get_mock = mocker.patch("httpx.get", side_effect=[person_response])
+    input_file = tmp_path / "root_id_absolute_url.json"
+    output_file: Path = tmp_path / "output.py"
+    run_main_and_assert(
+        input_path=input_file,
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="root_id_absolute_url.py",
+        copy_files=[(JSON_SCHEMA_DATA_PATH / "root_id_absolute_url.json", input_file)],
+    )
+    httpx_get_mock.assert_has_calls([
+        call(
+            "https://example.com/person.json",
+            headers=None,
+            verify=True,
+            follow_redirects=True,
+            params=None,
+        ),
+    ])
+
+
+def test_main_root_id_jsonschema_with_absolute_local_file(output_file: Path) -> None:
+    """Test root ID JSON Schema with absolute local file path."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "root_id_absolute_url.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="root_id_absolute_url.py",
+    )
+
+
+@pytest.mark.benchmark
+def test_main_jsonschema_id(output_file: Path) -> None:
+    """Test JSON Schema with ID field."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "id.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="id.py",
+    )
+
+
+def test_main_jsonschema_id_as_stdin(monkeypatch: pytest.MonkeyPatch, output_file: Path) -> None:
+    """Test JSON Schema ID handling from stdin."""
+    run_main_and_assert(
+        stdin_path=JSON_SCHEMA_DATA_PATH / "id.json",
+        output_path=output_file,
+        monkeypatch=monkeypatch,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="id_stdin.py",
+    )
+
+
+def test_main_jsonschema_stdin_oneof_ref(monkeypatch: pytest.MonkeyPatch, output_file: Path) -> None:
+    """Test JSON Schema with oneOf $ref from stdin."""
+    run_main_and_assert(
+        stdin_path=JSON_SCHEMA_DATA_PATH / "stdin_oneof_ref.json",
+        output_path=output_file,
+        monkeypatch=monkeypatch,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="stdin_oneof_ref.py",
+    )
+
+
+def test_main_jsonschema_ids(output_dir: Path) -> None:
+    """Test JSON Schema with multiple IDs."""
+    with freeze_time(TIMESTAMP):
+        run_main_and_assert(
+            input_path=JSON_SCHEMA_DATA_PATH / "ids" / "Organization.schema.json",
+            output_path=output_dir,
+            expected_directory=EXPECTED_JSON_SCHEMA_PATH / "ids",
+            input_file_type="jsonschema",
+        )
+
+
+@pytest.mark.benchmark
+def test_main_external_definitions(output_file: Path) -> None:
+    """Test external definitions in JSON Schema."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "external_definitions_root.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+    )
+
+
+def test_main_external_files_in_directory(output_file: Path) -> None:
+    """Test external files in directory structure."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "external_files_in_directory" / "person.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+    )
+
+
+def test_main_nested_directory(output_dir: Path) -> None:
+    """Test nested directory structure generation."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "external_files_in_directory",
+        output_path=output_dir,
+        expected_directory=EXPECTED_JSON_SCHEMA_PATH / "nested_directory",
+        input_file_type="jsonschema",
+    )
+
+
+def test_main_circular_reference(output_file: Path) -> None:
+    """Test circular reference handling."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "circular_reference.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+    )
+
+
+def test_main_invalid_enum_name(output_file: Path) -> None:
+    """Test invalid enum name handling."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "invalid_enum_name.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--snake-case-field"],
+    input_schema="jsonschema/invalid_enum_name.json",
+    cli_args=["--snake-case-field"],
+    golden_output="jsonschema/invalid_enum_name_snake_case_field.py",
+    related_options=["--capitalize-enum-members"],
+)
+def test_main_invalid_enum_name_snake_case_field(output_file: Path) -> None:
+    """Convert field names to snake_case format.
+
+    The `--snake-case-field` flag converts camelCase or PascalCase field names
+    to snake_case format in the generated Python code, following Python naming
+    conventions (PEP 8).
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "invalid_enum_name.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        extra_args=["--snake-case-field"],
+    )
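+
+
+# Illustrative sketch (hypothetical property): with `--snake-case-field`, a
+# property named `petName` is emitted as
+#   pet_name: str = Field(..., alias='petName')
+# so the generated model still (de)serializes the original camelCase name.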
+
+
+@pytest.mark.cli_doc(
+    options=["--reuse-model"],
+    input_schema="jsonschema/duplicate_enum.json",
+    cli_args=["--reuse-model"],
+    golden_output="jsonschema/json_reuse_enum.py",
+    related_options=["--collapse-root-models"],
+)
+def test_main_json_reuse_enum(output_file: Path) -> None:
+    """Reuse identical model definitions instead of generating duplicates.
+
+    The `--reuse-model` flag detects identical enum or model definitions
+    across the schema and generates a single shared definition, reducing
+    code duplication in the output.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "duplicate_enum.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        extra_args=["--reuse-model"],
+    )
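+
+
+# Illustrative note: with `--reuse-model`, properties that declare identical
+# inline enums (as in duplicate_enum.json) share one generated Enum class
+# instead of each getting a numbered duplicate class.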
+
+
+@pytest.mark.cli_doc(
+    options=["--capitalize-enum-members"],
+    input_schema="jsonschema/many_case_enum.json",
+    cli_args=["--capitalize-enum-members"],
+    golden_output="jsonschema/json_capitalise_enum_members.py",
+    related_options=["--snake-case-field"],
+    aliases=["--capitalise-enum-members"],
+)
+def test_main_json_capitalise_enum_members(output_file: Path) -> None:
+    """Capitalize enum member names to UPPER_CASE format.
+
+    The `--capitalize-enum-members` flag converts enum member names to
+    UPPER_CASE format (e.g., `active` becomes `ACTIVE`), following Python
+    naming conventions for constants.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "many_case_enum.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        extra_args=["--capitalise-enum-members"],
+    )
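+
+
+# Illustrative sketch (hypothetical values): `--capitalize-enum-members` turns
+#   active = 'active'
+# into
+#   ACTIVE = 'active'
+# The spelling `--capitalise-enum-members` (used above) is a documented alias.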
+
+
+def test_main_json_capitalise_enum_members_without_enum(output_file: Path) -> None:
+    """Test enum member capitalization without enum flag."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "person.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="autodetect.py",
+    )
+
+
+def test_main_similar_nested_array(output_file: Path) -> None:
+    """Test similar nested array structures."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "similar_nested_array.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+    )
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "require_referenced_field",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "require_referenced_field_pydantic_v2",
+        ),
+    ],
+)
+def test_main_require_referenced_field(output_model: str, expected_output: str, tmp_path: Path) -> None:
+    """Test required referenced fields."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "require_referenced_field/",
+        output_path=tmp_path,
+        output_to_expected=[
+            ("referenced.py", f"{expected_output}/referenced.py"),
+            ("required.py", f"{expected_output}/required.py"),
+        ],
+        assert_func=assert_file_content,
+        input_file_type="jsonschema",
+        extra_args=["--output-datetime-class", "AwareDatetime", "--output-model-type", output_model],
+    )
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "require_referenced_field",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "require_referenced_field_naivedatetime",
+        ),
+    ],
+)
+def test_main_require_referenced_field_naive_datetime(output_model: str, expected_output: str, tmp_path: Path) -> None:
+    """Test required referenced field with naive datetime."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "require_referenced_field/",
+        output_path=tmp_path,
+        output_to_expected=[
+            ("referenced.py", f"{expected_output}/referenced.py"),
+            ("required.py", f"{expected_output}/required.py"),
+        ],
+        assert_func=assert_file_content,
+        input_file_type="jsonschema",
+        extra_args=["--output-datetime-class", "NaiveDatetime", "--output-model-type", output_model],
+    )
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "require_referenced_field",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "require_referenced_field_pydantic_v2",
+        ),
+        (
+            "msgspec.Struct",
+            "require_referenced_field_msgspec",
+        ),
+    ],
+)
+def test_main_require_referenced_field_datetime(output_model: str, expected_output: str, tmp_path: Path) -> None:
+    """Test required referenced field with datetime."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "require_referenced_field/",
+        output_path=tmp_path,
+        output_to_expected=[
+            ("referenced.py", f"{expected_output}/referenced.py"),
+            ("required.py", f"{expected_output}/required.py"),
+        ],
+        assert_func=assert_file_content,
+        input_file_type="jsonschema",
+        extra_args=["--output-model-type", output_model],
+    )
+
+
+def test_main_json_pointer(output_file: Path) -> None:
+    """Test JSON pointer references."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "json_pointer.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+    )
+
+
+def test_main_nested_json_pointer(output_file: Path) -> None:
+    """Test nested JSON pointer references."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "nested_json_pointer.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+    )
+
+
+def test_main_jsonschema_multiple_files_json_pointer(output_dir: Path) -> None:
+    """Test JSON pointer with multiple files."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "multiple_files_json_pointer",
+        output_path=output_dir,
+        expected_directory=EXPECTED_JSON_SCHEMA_PATH / "multiple_files_json_pointer",
+        input_file_type="jsonschema",
+    )
+
+
+def test_main_root_model_with_additional_properties(output_file: Path) -> None:
+    """Test root model with additional properties."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "root_model_with_additional_properties.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--use-generic-container-types"],
+    input_schema="jsonschema/root_model_with_additional_properties.json",
+    cli_args=["--use-generic-container-types"],
+    golden_output="jsonschema/root_model_with_additional_properties_use_generic_container_types.py",
+    related_options=["--use-standard-collections"],
+)
+def test_main_root_model_with_additional_properties_use_generic_container_types(output_file: Path) -> None:
+    """Use typing.Dict/List instead of dict/list for container types.
+
+    The `--use-generic-container-types` flag generates typing module generic
+    containers (Dict, List, etc.) instead of built-in types. This is useful for
+    Python 3.8 compatibility or when explicit typing imports are preferred.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "root_model_with_additional_properties.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        extra_args=["--use-generic-container-types"],
+    )
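+
+
+# Illustrative sketch (hypothetical field): an additionalProperties object maps
+# to `Mapping[str, str]` under `--use-generic-container-types`, versus
+# `Dict[str, str]` (or `dict[str, str]`) without it.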
+
+
+@pytest.mark.cli_doc(
+    options=["--use-standard-collections"],
+    input_schema="jsonschema/root_model_with_additional_properties.json",
+    cli_args=["--use-standard-collections"],
+    golden_output="jsonschema/root_model_with_additional_properties_use_standard_collections.py",
+    related_options=["--use-generic-container-types"],
+)
+def test_main_root_model_with_additional_properties_use_standard_collections(output_file: Path) -> None:
+    """Use built-in dict/list instead of typing.Dict/List.
+
+    The `--use-standard-collections` flag generates built-in container types
+    (dict, list) instead of typing module equivalents. This produces cleaner
+    code for Python 3.9+ where built-in types support subscripting.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "root_model_with_additional_properties.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        extra_args=["--use-standard-collections"],
+    )
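+
+
+# Illustrative sketch (hypothetical field): `--use-standard-collections` emits
+# `dict[str, str]` / `list[str]` instead of `Dict[str, str]` / `List[str]`,
+# avoiding typing imports on Python 3.9+ targets.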
+
+
+def test_main_root_model_with_additional_properties_literal(min_version: str, output_file: Path) -> None:
+    """Test root model additional properties with literal types."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "root_model_with_additional_properties.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        extra_args=["--enum-field-as-literal", "all", "--target-python-version", min_version],
+    )
+
+
+def test_main_jsonschema_multiple_files_ref(output_dir: Path) -> None:
+    """Test multiple files with references."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "multiple_files_self_ref",
+        output_path=output_dir,
+        expected_directory=EXPECTED_JSON_SCHEMA_PATH / "multiple_files_self_ref",
+        input_file_type="jsonschema",
+    )
+
+
+def test_main_jsonschema_multiple_files_ref_test_json(output_file: Path) -> None:
+    """Test main jsonschema multiple files ref json."""
+    with chdir(JSON_SCHEMA_DATA_PATH / "multiple_files_self_ref"):
+        run_main_and_assert(
+            input_path=Path("test.json"),
+            output_path=output_file,
+            input_file_type="jsonschema",
+            assert_func=assert_file_content,
+            expected_file="multiple_files_self_ref_single.py",
+        )
+
+
+@pytest.mark.cli_doc(
+    options=["--original-field-name-delimiter"],
+    input_schema="jsonschema/space_field_enum.json",
+    cli_args=["--snake-case-field", "--original-field-name-delimiter", " "],
+    golden_output="main/jsonschema/space_field_enum_snake_case_field.py",
+)
+def test_main_space_field_enum_snake_case_field(output_file: Path) -> None:
+    """Specify delimiter for original field names when using snake-case conversion.
+
+    The `--original-field-name-delimiter` option works with `--snake-case-field` to specify
+    the delimiter used in original field names. This is useful when field names contain
+    delimiters like spaces or hyphens that should be treated as word boundaries during
+    snake_case conversion.
+    """
+    with chdir(JSON_SCHEMA_DATA_PATH / "space_field_enum.json"):
+        run_main_and_assert(
+            input_path=Path("space_field_enum.json"),
+            output_path=output_file,
+            input_file_type="jsonschema",
+            assert_func=assert_file_content,
+            extra_args=["--snake-case-field", "--original-field-name-delimiter", " "],
+        )
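+
+
+# Illustrative sketch (hypothetical property): with a space delimiter, a
+# property named `first name` becomes
+#   first_name: str = Field(..., alias='first name')
+# because the space is treated as a word boundary during snake_case conversion.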
+
+
+@pytest.mark.benchmark
+def test_main_all_of_ref(output_file: Path) -> None:
+    """Test allOf with references."""
+    with chdir(JSON_SCHEMA_DATA_PATH / "all_of_ref"):
+        run_main_and_assert(
+            input_path=Path("test.json"),
+            output_path=output_file,
+            input_file_type="jsonschema",
+            assert_func=assert_file_content,
+            extra_args=["--class-name", "Test"],
+        )
+
+
+def test_main_all_of_with_object(output_file: Path) -> None:
+    """Test allOf with object types."""
+    with chdir(JSON_SCHEMA_DATA_PATH):
+        run_main_and_assert(
+            input_path=Path("all_of_with_object.json"),
+            output_path=output_file,
+            input_file_type="jsonschema",
+            assert_func=assert_file_content,
+        )
+
+
+def test_main_all_of_merge_same_property(output_file: Path) -> None:
+    """Test allOf merging when duplicate property names exist across refs."""
+    with chdir(JSON_SCHEMA_DATA_PATH):
+        run_main_and_assert(
+            input_path=Path("all_of_merge_same_property.json"),
+            output_path=output_file,
+            input_file_type="jsonschema",
+            assert_func=assert_file_content,
+            expected_file="all_of_merge_same_property.py",
+            extra_args=["--class-name", "Model"],
+        )
+
+
+def test_main_all_of_merge_boolean_property(output_file: Path) -> None:
+    """Test allOf merging when a property has a boolean schema (false)."""
+    with chdir(JSON_SCHEMA_DATA_PATH):
+        run_main_and_assert(
+            input_path=Path("all_of_merge_boolean_property.json"),
+            output_path=output_file,
+            input_file_type="jsonschema",
+            assert_func=assert_file_content,
+            expected_file="all_of_merge_boolean_property.py",
+            extra_args=["--class-name", "Model"],
+        )
+
+
+def test_main_all_of_ref_with_property_override(output_file: Path) -> None:
+    """Test allOf with $ref preserves inheritance when properties are overridden."""
+    with chdir(JSON_SCHEMA_DATA_PATH):
+        run_main_and_assert(
+            input_path=Path("all_of_ref_with_property_override.json"),
+            output_path=output_file,
+            input_file_type="jsonschema",
+            assert_func=assert_file_content,
+            expected_file="all_of_ref_with_property_override.py",
+        )
+
+
+@pytest.mark.skipif(
+    int(black.__version__.split(".")[0]) >= 24,
+    reason="Installed black doesn't support the old style",
+)
+def test_main_combined_array(output_file: Path) -> None:
+    """Test combined array types."""
+    with chdir(JSON_SCHEMA_DATA_PATH):
+        run_main_and_assert(
+            input_path=Path("combined_array.json"),
+            output_path=output_file,
+            input_file_type="jsonschema",
+            assert_func=assert_file_content,
+        )
+
+
+@pytest.mark.cli_doc(
+    options=["--disable-timestamp"],
+    input_schema="jsonschema/pattern.json",
+    cli_args=["--disable-timestamp"],
+    golden_output="jsonschema/pattern.py",
+)
+def test_main_jsonschema_pattern(output_file: Path) -> None:
+    """Disable timestamp in generated file header for reproducible output.
+
+    The `--disable-timestamp` flag omits the `timestamp:` line from the generated
+    file header, so repeated runs over an unchanged schema produce identical output.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "pattern.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="pattern.py",
+        extra_args=["--disable-timestamp"],
+    )
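+
+
+# Illustrative header sketch (shape only, timestamp value is run-dependent):
+#   # generated by datamodel-codegen:
+#   #   filename:  pattern.json
+#   #   timestamp: <generation time>
+# `--disable-timestamp` drops the `timestamp:` line.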
+
+
+def test_main_generate(tmp_path: Path) -> None:
+    """Test code generation function."""
+    output_file: Path = tmp_path / "output.py"
+    input_ = (JSON_SCHEMA_DATA_PATH / "person.json").relative_to(Path.cwd())
+    assert not input_.is_absolute()
+    generate(
+        input_=input_,
+        input_file_type=InputFileType.JsonSchema,
+        output=output_file,
+    )
+
+    assert_file_content(output_file, "general.py")
+
+
+def test_main_generate_non_pydantic_output(tmp_path: Path) -> None:
+    """Test generation with non-Pydantic output models (see issue #1452)."""
+    output_file: Path = tmp_path / "output.py"
+    input_ = (JSON_SCHEMA_DATA_PATH / "simple_string.json").relative_to(Path.cwd())
+    assert not input_.is_absolute()
+    generate(
+        input_=input_,
+        input_file_type=InputFileType.JsonSchema,
+        output=output_file,
+        output_model_type=DataModelType.DataclassesDataclass,
+    )
+
+    assert_file_content(output_file, "generate_non_pydantic_output.py")
+
+
+def test_main_generate_from_directory(tmp_path: Path) -> None:
+    """Test generation from directory input."""
+    input_ = (JSON_SCHEMA_DATA_PATH / "external_files_in_directory").relative_to(Path.cwd())
+    assert not input_.is_absolute()
+    assert input_.is_dir()
+    generate(
+        input_=input_,
+        input_file_type=InputFileType.JsonSchema,
+        output=tmp_path,
+    )
+
+    main_nested_directory = EXPECTED_JSON_SCHEMA_PATH / "nested_directory"
+    assert_directory_content(tmp_path, main_nested_directory)
+
+
+def test_main_generate_custom_class_name_generator(tmp_path: Path) -> None:
+    """Test custom class name generator."""
+
+    def custom_class_name_generator(title: str) -> str:
+        return f"Custom{title}"
+
+    output_file: Path = tmp_path / "output.py"
+    input_ = (JSON_SCHEMA_DATA_PATH / "person.json").relative_to(Path.cwd())
+    assert not input_.is_absolute()
+    generate(
+        input_=input_,
+        input_file_type=InputFileType.JsonSchema,
+        output=output_file,
+        custom_class_name_generator=custom_class_name_generator,
+    )
+
+    assert_file_content(
+        output_file,
+        "general.py",
+        transform=lambda s: s.replace("CustomPerson", "Person"),
+    )
+
+
+def test_main_generate_custom_class_name_generator_additional_properties(tmp_path: Path) -> None:
+    """Test custom class name generator with additional properties."""
+    output_file = tmp_path / "models.py"
+
+    def custom_class_name_generator(name: str) -> str:
+        return f"Custom{name[0].upper() + name[1:]}"
+
+    input_ = (JSON_SCHEMA_DATA_PATH / "root_model_with_additional_properties.json").relative_to(Path.cwd())
+    assert not input_.is_absolute()
+    generate(
+        input_=input_,
+        input_file_type=InputFileType.JsonSchema,
+        output=output_file,
+        custom_class_name_generator=custom_class_name_generator,
+    )
+
+    assert_file_content(output_file, "root_model_with_additional_properties_custom_class_name.py")
+
+
+def test_main_http_jsonschema(mocker: MockerFixture, output_file: Path) -> None:
+    """Test HTTP JSON Schema fetching."""
+    external_directory = JSON_SCHEMA_DATA_PATH / "external_files_in_directory"
+    base_url = "https://example.com/external_files_in_directory/"
+
+    url_to_path = {
+        f"{base_url}person.json": "person.json",
+        f"{base_url}definitions/relative/animal/pet/pet.json": "definitions/relative/animal/pet/pet.json",
+        f"{base_url}definitions/relative/animal/fur.json": "definitions/relative/animal/fur.json",
+        f"{base_url}definitions/friends.json": "definitions/friends.json",
+        f"{base_url}definitions/food.json": "definitions/food.json",
+        f"{base_url}definitions/machine/robot.json": "definitions/machine/robot.json",
+        f"{base_url}definitions/drink/coffee.json": "definitions/drink/coffee.json",
+        f"{base_url}definitions/drink/tea.json": "definitions/drink/tea.json",
+    }
+
+    def get_mock_response(url: str, **_: object) -> mocker.Mock:
+        path = url_to_path.get(url)
+        mock = mocker.Mock()
+        mock.text = (external_directory / path).read_text()
+        return mock
+
+    httpx_get_mock = mocker.patch(
+        "httpx.get",
+        side_effect=get_mock_response,
+    )
+    run_main_url_and_assert(
+        url="https://example.com/external_files_in_directory/person.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="external_files_in_directory.py",
+        transform=lambda s: s.replace(
+            "#   filename:  https://example.com/external_files_in_directory/person.json",
+            "#   filename:  person.json",
+        ),
+    )
+    httpx_get_mock.assert_has_calls(
+        [
+            call(
+                "https://example.com/external_files_in_directory/person.json",
+                headers=None,
+                verify=True,
+                follow_redirects=True,
+                params=None,
+            ),
+            call(
+                "https://example.com/external_files_in_directory/definitions/relative/animal/pet/pet.json",
+                headers=None,
+                verify=True,
+                follow_redirects=True,
+                params=None,
+            ),
+            call(
+                "https://example.com/external_files_in_directory/definitions/relative/animal/fur.json",
+                headers=None,
+                verify=True,
+                follow_redirects=True,
+                params=None,
+            ),
+            call(
+                "https://example.com/external_files_in_directory/definitions/friends.json",
+                headers=None,
+                verify=True,
+                follow_redirects=True,
+                params=None,
+            ),
+            call(
+                "https://example.com/external_files_in_directory/definitions/food.json",
+                headers=None,
+                verify=True,
+                follow_redirects=True,
+                params=None,
+            ),
+            call(
+                "https://example.com/external_files_in_directory/definitions/machine/robot.json",
+                headers=None,
+                verify=True,
+                follow_redirects=True,
+                params=None,
+            ),
+            call(
+                "https://example.com/external_files_in_directory/definitions/drink/coffee.json",
+                headers=None,
+                verify=True,
+                follow_redirects=True,
+                params=None,
+            ),
+            call(
+                "https://example.com/external_files_in_directory/definitions/drink/tea.json",
+                headers=None,
+                verify=True,
+                follow_redirects=True,
+                params=None,
+            ),
+        ],
+        any_order=True,
+    )
+    assert httpx_get_mock.call_count == 8
+
+
+@pytest.mark.parametrize(
+    (
+        "headers_arguments",
+        "headers_requests",
+        "query_parameters_arguments",
+        "query_parameters_requests",
+        "http_ignore_tls",
+    ),
+    [
+        (
+            ("Authorization: Basic dXNlcjpwYXNz",),
+            [("Authorization", "Basic dXNlcjpwYXNz")],
+            ("key=value",),
+            [("key", "value")],
+            False,
+        ),
+        (
+            ("Authorization: Basic dXNlcjpwYXNz", "X-API-key: abcefg"),
+            [("Authorization", "Basic dXNlcjpwYXNz"), ("X-API-key", "abcefg")],
+            ("key=value", "newkey=newvalue"),
+            [("key", "value"), ("newkey", "newvalue")],
+            True,
+        ),
+    ],
+)
+def test_main_http_jsonschema_with_http_headers_and_http_query_parameters_and_ignore_tls(
+    mocker: MockerFixture,
+    headers_arguments: tuple[str, str],
+    headers_requests: list[tuple[str, str]],
+    query_parameters_arguments: tuple[str, ...],
+    query_parameters_requests: list[tuple[str, str]],
+    http_ignore_tls: bool,
+    tmp_path: Path,
+) -> None:
+    """Test HTTP JSON Schema with headers, query params, and TLS ignore."""
+    external_directory = JSON_SCHEMA_DATA_PATH / "external_files_in_directory"
+    base_url = "https://example.com/external_files_in_directory/"
+
+    url_to_path = {
+        f"{base_url}person.json": "person.json",
+        f"{base_url}definitions/relative/animal/pet/pet.json": "definitions/relative/animal/pet/pet.json",
+        f"{base_url}definitions/relative/animal/fur.json": "definitions/relative/animal/fur.json",
+        f"{base_url}definitions/friends.json": "definitions/friends.json",
+        f"{base_url}definitions/food.json": "definitions/food.json",
+        f"{base_url}definitions/machine/robot.json": "definitions/machine/robot.json",
+        f"{base_url}definitions/drink/coffee.json": "definitions/drink/coffee.json",
+        f"{base_url}definitions/drink/tea.json": "definitions/drink/tea.json",
+    }
+
+    def get_mock_response(url: str, **_: object) -> mocker.Mock:
+        path = url_to_path.get(url)
+        mock = mocker.Mock()
+        mock.text = (external_directory / path).read_text()
+        return mock
+
+    httpx_get_mock = mocker.patch(
+        "httpx.get",
+        side_effect=get_mock_response,
+    )
+    output_file: Path = tmp_path / "output.py"
+    extra_args = [
+        "--http-headers",
+        *headers_arguments,
+        "--http-query-parameters",
+        *query_parameters_arguments,
+    ]
+    if http_ignore_tls:
+        extra_args.append("--http-ignore-tls")
+
+    run_main_url_and_assert(
+        url="https://example.com/external_files_in_directory/person.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="external_files_in_directory.py",
+        extra_args=extra_args,
+        transform=lambda s: s.replace(
+            "#   filename:  https://example.com/external_files_in_directory/person.json",
+            "#   filename:  person.json",
+        ),
+    )
+    httpx_get_mock.assert_has_calls(
+        [
+            call(
+                "https://example.com/external_files_in_directory/person.json",
+                headers=headers_requests,
+                verify=bool(not http_ignore_tls),
+                follow_redirects=True,
+                params=query_parameters_requests,
+            ),
+            call(
+                "https://example.com/external_files_in_directory/definitions/relative/animal/pet/pet.json",
+                headers=headers_requests,
+                verify=bool(not http_ignore_tls),
+                follow_redirects=True,
+                params=query_parameters_requests,
+            ),
+            call(
+                "https://example.com/external_files_in_directory/definitions/relative/animal/fur.json",
+                headers=headers_requests,
+                verify=bool(not http_ignore_tls),
+                follow_redirects=True,
+                params=query_parameters_requests,
+            ),
+            call(
+                "https://example.com/external_files_in_directory/definitions/friends.json",
+                headers=headers_requests,
+                verify=bool(not http_ignore_tls),
+                follow_redirects=True,
+                params=query_parameters_requests,
+            ),
+            call(
+                "https://example.com/external_files_in_directory/definitions/food.json",
+                headers=headers_requests,
+                verify=bool(not http_ignore_tls),
+                follow_redirects=True,
+                params=query_parameters_requests,
+            ),
+            call(
+                "https://example.com/external_files_in_directory/definitions/machine/robot.json",
+                headers=headers_requests,
+                verify=bool(not http_ignore_tls),
+                follow_redirects=True,
+                params=query_parameters_requests,
+            ),
+            call(
+                "https://example.com/external_files_in_directory/definitions/drink/coffee.json",
+                headers=headers_requests,
+                verify=bool(not http_ignore_tls),
+                follow_redirects=True,
+                params=query_parameters_requests,
+            ),
+            call(
+                "https://example.com/external_files_in_directory/definitions/drink/tea.json",
+                headers=headers_requests,
+                verify=bool(not http_ignore_tls),
+                follow_redirects=True,
+                params=query_parameters_requests,
+            ),
+        ],
+        any_order=True,
+    )
+    assert httpx_get_mock.call_count == 8
+
+
+def test_main_self_reference(output_file: Path) -> None:
+    """Test self-referencing schemas."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "self_reference.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+    )
+
+
+@pytest.mark.benchmark
+def test_main_strict_types(output_file: Path) -> None:
+    """Test strict type generation."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "strict_types.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--strict-types"],
+    input_schema="jsonschema/strict_types.json",
+    cli_args=["--strict-types", "str", "bytes", "int", "float", "bool"],
+    golden_output="main/jsonschema/strict_types_all.py",
+)
+@pytest.mark.skipif(
+    int(black.__version__.split(".")[0]) >= 24,
+    reason="Installed black doesn't support the old style",
+)
+def test_main_strict_types_all(output_file: Path) -> None:
+    """Enable strict type validation for specified Python types.
+
+    The `--strict-types` option enforces stricter type checking by preventing implicit
+    type coercion for the specified types (str, bytes, int, float, bool). This
+    generates StrictStr, StrictBytes, StrictInt, StrictFloat, and StrictBool types
+    in Pydantic models, ensuring values match exactly without automatic conversion.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "strict_types.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        extra_args=["--strict-types", "str", "bytes", "int", "float", "bool"],
+    )
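+
+
+# Illustrative sketch (hypothetical fields): with `--strict-types str int`, a
+# string property becomes `name: StrictStr` and an integer one `age: StrictInt`,
+# so pydantic rejects values it would otherwise coerce (e.g. "42" -> 42).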
+
+
+@pytest.mark.cli_doc(
+    options=["--field-constraints"],
+    input_schema="jsonschema/strict_types.json",
+    cli_args=["--strict-types", "str", "bytes", "int", "float", "bool", "--field-constraints"],
+    golden_output="jsonschema/strict_types_all_field_constraints.py",
+    related_options=["--strict-types"],
+)
+def test_main_strict_types_all_with_field_constraints(output_file: Path) -> None:
+    """Generate Field() with validation constraints from schema.
+
+    The `--field-constraints` flag generates Pydantic Field() declarations with
+    validation constraints (min/max length, pattern, minimum/maximum values, etc.)
+    extracted from the JSON Schema, enabling runtime validation.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "strict_types.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="strict_types_all_field_constraints.py",
+        extra_args=["--strict-types", "str", "bytes", "int", "float", "bool", "--field-constraints"],
+    )
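+
+
+# Illustrative sketch (hypothetical constraints): `--field-constraints` emits
+#   name: str = Field(..., min_length=1, max_length=64)
+# instead of a `constr(min_length=1, max_length=64)` annotated type.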
+
+
+def test_main_jsonschema_special_enum(output_file: Path) -> None:
+    """Test special enum handling."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "special_enum.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="special_enum.py",
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--special-field-name-prefix"],
+    input_schema="jsonschema/special_enum.json",
+    cli_args=["--special-field-name-prefix", "special"],
+    golden_output="jsonschema/special_enum_special_field_name_prefix.py",
+)
+def test_main_jsonschema_special_enum_special_field_name_prefix(output_file: Path) -> None:
+    """Prefix to add to special field names (like reserved keywords).
+
+    The `--special-field-name-prefix` option sets the prefix (default: `field`)
+    added to field names whose first character cannot start a Python identifier.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "special_enum.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="special_enum_special_field_name_prefix.py",
+        extra_args=["--special-field-name-prefix", "special"],
+    )
+
+
+def test_main_jsonschema_special_enum_special_field_name_prefix_keep_private(output_file: Path) -> None:
+    """Test special enum with prefix keeping private fields."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "special_enum.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="special_enum_special_field_name_prefix_keep_private.py",
+        extra_args=["--special-field-name-prefix", ""],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--remove-special-field-name-prefix"],
+    input_schema="jsonschema/special_prefix_model.json",
+    cli_args=["--remove-special-field-name-prefix"],
+    golden_output="jsonschema/special_model_remove_special_field_name_prefix.py",
+)
+def test_main_jsonschema_special_model_remove_special_field_name_prefix(output_file: Path) -> None:
+    """Remove the special prefix from field names.
+
+    The `--remove-special-field-name-prefix` flag strips the prefix that is
+    otherwise added to field names whose first character cannot start a Python
+    identifier.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "special_prefix_model.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="special_model_remove_special_field_name_prefix.py",
+        extra_args=["--remove-special-field-name-prefix"],
+    )
+
+
+def test_main_jsonschema_subclass_enum(output_file: Path) -> None:
+    """Test enum subclassing."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "subclass_enum.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="subclass_enum.py",
+        extra_args=["--use-subclass-enum"],
+    )
+
+
+def test_main_jsonschema_allof_enum_ref(output_file: Path) -> None:
+    """Test allOf referencing enum from another schema."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "allof_enum_ref.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+    )
+
+
+def test_main_jsonschema_allof_enum_no_external_ref(output_file: Path) -> None:
+    """Test allOf referencing enum without external $ref.
+
+    This covers the case where existing_ref is None in parse_all_of,
+    so the schema is optimized to directly return the enum reference.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "allof_enum_no_external_ref.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "22",
+    reason="Installed black doesn't support the old style",
+)
+@pytest.mark.cli_doc(
+    options=["--use-specialized-enum"],
+    input_schema="jsonschema/subclass_enum.json",
+    cli_args=["--target-python-version", "3.11", "--use-specialized-enum"],
+    golden_output="jsonschema/enum_specialized.py",
+    related_options=["--no-use-specialized-enum", "--use-subclass-enum"],
+)
+def test_main_jsonschema_specialized_enums(output_file: Path) -> None:
+    """Generate StrEnum/IntEnum for string/integer enums (Python 3.11+).
+
+    The `--use-specialized-enum` flag generates specialized enum types:
+    - `StrEnum` for string enums
+    - `IntEnum` for integer enums
+
+    This is the default behavior for Python 3.11+ targets.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "subclass_enum.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="enum_specialized.py",
+        extra_args=["--target-python-version", "3.11"],
+    )
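+
+
+# Illustrative sketch (hypothetical enum): targeting Python 3.11+, a string enum
+# is emitted as
+#   class Kind(StrEnum): ...
+# so members compare equal to their string values without a separate str mixin.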
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "22",
+    reason="Installed black doesn't support the old style",
+)
+@pytest.mark.cli_doc(
+    options=["--no-use-specialized-enum"],
+    input_schema="jsonschema/subclass_enum.json",
+    cli_args=["--target-python-version", "3.11", "--no-use-specialized-enum"],
+    golden_output="jsonschema/enum_specialized_disable.py",
+    related_options=["--use-specialized-enum", "--use-subclass-enum"],
+)
+def test_main_jsonschema_specialized_enums_disabled(output_file: Path) -> None:
+    """Disable specialized enum generation (StrEnum/IntEnum).
+
+    The `--no-use-specialized-enum` flag disables specialized enum types,
+    generating standard `Enum` classes instead of `StrEnum`/`IntEnum`.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "subclass_enum.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="enum_specialized_disable.py",
+        extra_args=["--target-python-version", "3.11", "--no-use-specialized-enum"],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--empty-enum-field-name"],
+    input_schema="jsonschema/special_enum.json",
+    cli_args=["--empty-enum-field-name", "empty"],
+    golden_output="jsonschema/special_enum_empty_enum_field_name.py",
+)
+def test_main_jsonschema_special_enum_empty_enum_field_name(output_file: Path) -> None:
+    """Name for empty string enum field values.
+
+    The `--empty-enum-field-name` flag configures the code generation behavior.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "special_enum.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="special_enum_empty_enum_field_name.py",
+        extra_args=["--empty-enum-field-name", "empty"],
+    )
+
+
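+# A hypothetical sketch (class name invented): with `--empty-enum-field-name
+# empty`, an enum value of "" becomes a member named `empty`:
+#
+#     class MyEnum(Enum):
+#         empty = ''
+
+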
+@pytest.mark.benchmark
+def test_main_jsonschema_special_field_name(output_file: Path) -> None:
+    """Test special field name handling."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "special_field_name.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="special_field_name.py",
+    )
+
+
+def test_main_jsonschema_complex_one_of(output_file: Path) -> None:
+    """Test complex oneOf schemas."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "complex_one_of.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="complex_one_of.py",
+    )
+
+
+def test_main_jsonschema_complex_any_of(output_file: Path) -> None:
+    """Test complex anyOf schemas."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "complex_any_of.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="complex_any_of.py",
+    )
+
+
+def test_main_jsonschema_combine_one_of_object(output_file: Path) -> None:
+    """Test combining oneOf with objects."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "combine_one_of_object.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="combine_one_of_object.py",
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+@pytest.mark.parametrize(
+    ("union_mode", "output_model", "expected_output"),
+    [
+        (None, "pydantic.BaseModel", "combine_any_of_object.py"),
+        (None, "pydantic_v2.BaseModel", "combine_any_of_object_v2.py"),
+        (
+            "left_to_right",
+            "pydantic_v2.BaseModel",
+            "combine_any_of_object_left_to_right.py",
+        ),
+    ],
+)
+@pytest.mark.cli_doc(
+    options=["--union-mode"],
+    input_schema="jsonschema/combine_any_of_object.json",
+    cli_args=["--union-mode", "left_to_right", "--output-model-type", "pydantic_v2.BaseModel"],
+    golden_output="jsonschema/combine_any_of_object_left_to_right.py",
+)
+def test_main_jsonschema_combine_any_of_object(
+    union_mode: str | None, output_model: str, expected_output: str, output_file: Path
+) -> None:
+    """Union mode for combining anyOf/oneOf schemas (smart or left_to_right).
+
+    The `--union-mode` flag configures the code generation behavior.
+    """
+    extra_args = ["--output-model-type", output_model]
+    if union_mode is not None:
+        extra_args.extend(["--union-mode", union_mode])
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "combine_any_of_object.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file=expected_output,
+        extra_args=extra_args,
+    )
+
+
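+# A hypothetical sketch (field name invented): with left_to_right, union
+# fields carry pydantic v2's validation mode explicitly, e.g.
+#
+#     value: Union[int, str] = Field(..., union_mode='left_to_right')
+
+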
+@pytest.mark.benchmark
+@pytest.mark.parametrize(
+    ("extra_args", "expected_file"),
+    [
+        (["--output-model-type", "pydantic_v2.BaseModel"], "jsonschema_root_model_ordering.py"),
+        (
+            ["--output-model-type", "pydantic_v2.BaseModel", "--keep-model-order"],
+            "jsonschema_root_model_ordering_keep_model_order.py",
+        ),
+    ],
+)
+def test_main_jsonschema_root_model_ordering(output_file: Path, extra_args: list[str], expected_file: str) -> None:
+    """Test RootModel is ordered after the types it references."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "root_model_ordering.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file=expected_file,
+        extra_args=extra_args,
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--field-include-all-keys"],
+    input_schema="jsonschema/person.json",
+    cli_args=["--field-include-all-keys"],
+    golden_output="jsonschema/general.py",
+)
+@pytest.mark.benchmark
+def test_main_jsonschema_field_include_all_keys(output_file: Path) -> None:
+    """Include all schema keys in Field() json_schema_extra.
+
+    The `--field-include-all-keys` flag configures the code generation behavior.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "person.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="general.py",
+        extra_args=["--field-include-all-keys"],
+    )
+
+
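+# A hypothetical sketch (key and value invented): a schema key with no
+# dedicated Field() keyword is carried through as an extra, e.g.
+#
+#     name: str = Field(..., customKey='value')
+
+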
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "field_extras_field_include_all_keys.py",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "field_extras_field_include_all_keys_v2.py",
+        ),
+    ],
+)
+@pytest.mark.cli_doc(
+    options=["--field-extra-keys-without-x-prefix"],
+    input_schema="jsonschema/extras.json",
+    cli_args=["--field-include-all-keys", "--field-extra-keys-without-x-prefix", "x-repr"],
+    model_outputs={
+        "pydantic_v1": "main/jsonschema/field_extras_field_include_all_keys.py",
+        "pydantic_v2": "main/jsonschema/field_extras_field_include_all_keys_v2.py",
+    },
+)
+def test_main_jsonschema_field_extras_field_include_all_keys(
+    output_model: str, expected_output: str, output_file: Path
+) -> None:
+    """Include specified schema extension keys in Field() without requiring 'x-' prefix.
+
+    The --field-extra-keys-without-x-prefix option allows you to specify custom
+    schema extension keys that should be included in Pydantic Field() extras without
+    the 'x-' prefix requirement. For example, 'x-repr' in the schema becomes 'repr'
+    in Field(). This is useful for custom schema extensions and vendor-specific metadata.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "extras.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file=expected_output,
+        extra_args=[
+            "--output-model-type",
+            output_model,
+            "--field-include-all-keys",
+            "--field-extra-keys-without-x-prefix",
+            "x-repr",
+        ],
+    )
+
+
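+# A hypothetical sketch (field invented): a schema property that carries
+# "x-repr": false is emitted with the prefix stripped, e.g.
+#
+#     name: Optional[str] = Field(None, repr=False)
+
+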
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "field_extras_field_extra_keys.py",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "field_extras_field_extra_keys_v2.py",
+        ),
+    ],
+)
+@pytest.mark.cli_doc(
+    options=["--field-extra-keys"],
+    input_schema="jsonschema/extras.json",
+    cli_args=["--field-extra-keys", "key2", "--field-extra-keys-without-x-prefix", "x-repr"],
+    model_outputs={
+        "pydantic_v1": "main/jsonschema/field_extras_field_extra_keys.py",
+        "pydantic_v2": "main/jsonschema/field_extras_field_extra_keys_v2.py",
+    },
+)
+def test_main_jsonschema_field_extras_field_extra_keys(
+    output_model: str, expected_output: str, output_file: Path
+) -> None:
+    """Include specific extra keys in Field() definitions.
+
+    The `--field-extra-keys` flag configures the code generation behavior.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "extras.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file=expected_output,
+        extra_args=[
+            "--output-model-type",
+            output_model,
+            "--field-extra-keys",
+            "key2",
+            "invalid-key-1",
+            "--field-extra-keys-without-x-prefix",
+            "x-repr",
+        ],
+    )
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "field_extras.py",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "field_extras_v2.py",
+        ),
+    ],
+)
+def test_main_jsonschema_field_extras(output_model: str, expected_output: str, output_file: Path) -> None:
+    """Test field extras generation."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "extras.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file=expected_output,
+        extra_args=["--output-model-type", output_model],
+    )
+
+
+def test_main_jsonschema_custom_base_path(output_file: Path) -> None:
+    """Test custom base path configuration."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "custom_base_path.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="custom_base_path.py",
+    )
+
+
+def test_long_description(output_file: Path) -> None:
+    """Test long description handling."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "long_description.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--wrap-string-literal"],
+    input_schema="jsonschema/long_description.json",
+    cli_args=["--wrap-string-literal"],
+    golden_output="jsonschema/long_description_wrap_string_literal.py",
+)
+def test_long_description_wrap_string_literal(output_file: Path) -> None:
+    """Wrap long string literals across multiple lines.
+
+    The `--wrap-string-literal` flag breaks long string literals (like descriptions)
+    across multiple lines for better readability, instead of having very long
+    single-line strings in the generated code.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "long_description.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        extra_args=["--wrap-string-literal"],
+    )
+
+
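+# A hypothetical sketch of the wrapping (identifier invented): a long
+# description is split into adjacent implicitly-concatenated literals:
+#
+#     description = (
+#         'A very long description that would otherwise sit on one '
+#         'line is broken across several shorter ones.'
+#     )
+
+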
+def test_version(capsys: pytest.CaptureFixture) -> None:
+    """Test version output."""
+    with pytest.raises(SystemExit) as e:
+        run_main_with_args(["--version"])
+    assert e.value.code == Exit.OK
+    captured = capsys.readouterr()
+    assert captured.out != "0.0.0\n"
+    assert not captured.err
+
+
+def test_jsonschema_pattern_properties(output_file: Path) -> None:
+    """Test JSON Schema pattern properties."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "pattern_properties.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="pattern_properties.py",
+    )
+
+
+def test_jsonschema_pattern_properties_field_constraints(output_file: Path) -> None:
+    """Test pattern properties with field constraints."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "pattern_properties.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="pattern_properties_field_constraints.py",
+        extra_args=["--field-constraints"],
+    )
+
+
+def test_jsonschema_titles(output_file: Path) -> None:
+    """Test JSON Schema title handling."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "titles.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="titles.py",
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--use-title-as-name"],
+    input_schema="jsonschema/titles.json",
+    cli_args=["--use-title-as-name"],
+    golden_output="jsonschema/titles_use_title_as_name.py",
+    related_options=["--class-name"],
+)
+def test_jsonschema_titles_use_title_as_name(output_file: Path) -> None:
+    """Use schema title as the generated class name.
+
+    The `--use-title-as-name` flag uses the `title` property from the schema
+    as the class name instead of deriving it from the property name or path.
+    This is useful when schemas have descriptive titles that should be preserved.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "titles.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="titles_use_title_as_name.py",
+        extra_args=["--use-title-as-name"],
+    )
+
+
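+# A hypothetical sketch (title invented): a schema object declaring
+# "title": "Person Address" is generated as
+#
+#     class PersonAddress(BaseModel): ...
+#
+# rather than a class named after the property or its JSON path.
+
+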
+def test_jsonschema_without_titles_use_title_as_name(output_file: Path) -> None:
+    """Test title as name without titles present."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "without_titles.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="without_titles_use_title_as_name.py",
+        extra_args=["--use-title-as-name"],
+    )
+
+
+def test_jsonschema_title_with_dots(output_file: Path) -> None:
+    """Test using title as name when title contains dots (e.g., version numbers)."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "title_with_dots.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="title_with_dots.py",
+        extra_args=["--use-title-as-name"],
+    )
+
+
+def test_main_jsonschema_has_default_value(output_file: Path) -> None:
+    """Test default value handling."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "has_default_value.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="has_default_value.py",
+    )
+
+
+def test_main_jsonschema_boolean_property(output_file: Path) -> None:
+    """Test boolean property generation."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "boolean_property.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="boolean_property.py",
+    )
+
+
+def test_main_jsonschema_modular_default_enum_member(output_dir: Path) -> None:
+    """Test modular enum with default member."""
+    with freeze_time(TIMESTAMP):
+        run_main_and_assert(
+            input_path=JSON_SCHEMA_DATA_PATH / "modular_default_enum_member",
+            output_path=output_dir,
+            expected_directory=EXPECTED_JSON_SCHEMA_PATH / "modular_default_enum_member",
+            extra_args=["--set-default-enum-member"],
+        )
+
+
+@pytest.mark.skipif(
+    int(black.__version__.split(".")[0]) < 22,
+    reason="Installed black doesn't support Python version 3.10",
+)
+def test_main_use_union_operator(output_dir: Path) -> None:
+    """Test union operator usage."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "external_files_in_directory",
+        output_path=output_dir,
+        expected_directory=EXPECTED_JSON_SCHEMA_PATH / "use_union_operator",
+        input_file_type="jsonschema",
+        extra_args=["--use-union-operator"],
+    )
+
+
+@pytest.mark.parametrize("as_module", [True, False])
+def test_treat_dot_as_module(as_module: bool, output_dir: Path) -> None:
+    """Test dot notation as module separator."""
+    path_extension = "treat_dot_as_module" if as_module else "treat_dot_not_as_module"
+    extra_args = ["--treat-dot-as-module"] if as_module else None
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "treat_dot_as_module",
+        output_path=output_dir,
+        expected_directory=EXPECTED_JSON_SCHEMA_PATH / path_extension,
+        extra_args=extra_args,
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--treat-dot-as-module"],
+    input_schema="jsonschema/treat_dot_as_module_single",
+    cli_args=["--treat-dot-as-module"],
+    golden_output="jsonschema/treat_dot_as_module_single/",
+)
+def test_treat_dot_as_module_single_file(output_dir: Path) -> None:
+    """Treat dots in schema names as module separators.
+
+    The `--treat-dot-as-module` flag configures the code generation behavior.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "treat_dot_as_module_single",
+        output_path=output_dir,
+        expected_directory=EXPECTED_JSON_SCHEMA_PATH / "treat_dot_as_module_single",
+        extra_args=["--treat-dot-as-module"],
+    )
+
+
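+# A hypothetical sketch (file name invented): with the flag enabled, a schema
+# named 'foo.bar.json' maps to the nested module 'foo/bar.py'; without it,
+# the dot stays in a single module name.
+
+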
+def test_main_jsonschema_duplicate_name(output_dir: Path) -> None:
+    """Test duplicate name handling."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "duplicate_name",
+        output_path=output_dir,
+        expected_directory=EXPECTED_JSON_SCHEMA_PATH / "duplicate_name",
+        input_file_type="jsonschema",
+    )
+
+
+def test_main_jsonschema_items_boolean(output_file: Path) -> None:
+    """Test items with boolean values."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "items_boolean.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="items_boolean.py",
+    )
+
+
+def test_main_jsonschema_array_in_additional_properties(output_file: Path) -> None:
+    """Test array in additional properties."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "array_in_additional_properties.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="array_in_additional_properties.py",
+    )
+
+
+def test_main_jsonschema_object_with_only_additional_properties(output_file: Path) -> None:
+    """Test object with only additional properties."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "string_dict.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="string_dict.py",
+    )
+
+
+def test_main_jsonschema_nullable_object(output_file: Path) -> None:
+    """Test nullable object handling."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "nullable_object.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="nullable_object.py",
+    )
+
+
+def test_main_jsonschema_ref_type_has_null(output_file: Path) -> None:
+    """Test that type: [type, null] from $ref schema is propagated."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "ref_type_has_null.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="ref_type_has_null.py",
+        extra_args=["--use-union-operator"],
+    )
+
+
+def test_main_jsonschema_object_has_one_of(output_file: Path) -> None:
+    """Test object with oneOf constraint."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "object_has_one_of.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="object_has_one_of.py",
+    )
+
+
+def test_main_jsonschema_oneof_const_enum(output_file: Path) -> None:
+    """Test oneOf with const values generates enum (issue #1925)."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "oneof_const_enum.yaml",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="oneof_const_enum.py",
+    )
+
+
+def test_main_jsonschema_oneof_const_enum_nullable(output_file: Path) -> None:
+    """Test nullable oneOf with const values generates optional enum."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "oneof_const_enum_nullable.yaml",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="oneof_const_enum_nullable.py",
+    )
+
+
+def test_main_jsonschema_oneof_const_enum_nested(output_file: Path) -> None:
+    """Test nested oneOf with const values in properties and array items."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "oneof_const_enum_nested.yaml",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="oneof_const_enum_nested.py",
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--enum-field-as-literal"],
+    input_schema="jsonschema/oneof_const_enum_nested.yaml",
+    cli_args=["--enum-field-as-literal", "all"],
+    golden_output="main/jsonschema/oneof_const_enum_nested_literal.py",
+)
+def test_main_jsonschema_oneof_const_enum_nested_literal(output_file: Path) -> None:
+    """Generate Literal types instead of Enums for fields with enumerated values.
+
+    The --enum-field-as-literal option replaces Enum classes with Literal types for
+    fields that have a fixed set of allowed values. Use 'all' to convert all enum
+    fields, or 'one' to only convert enums with a single value. This produces more
+    concise type hints and avoids creating Enum classes when not needed.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "oneof_const_enum_nested.yaml",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="oneof_const_enum_nested_literal.py",
+        extra_args=["--enum-field-as-literal", "all"],
+    )
+
+
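+# A hypothetical sketch (field invented): with `--enum-field-as-literal all`,
+# a field restricted to 'a' and 'b' becomes
+#
+#     kind: Literal['a', 'b']
+#
+# instead of a dedicated Enum class with those members.
+
+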
+def test_main_jsonschema_oneof_const_enum_int(output_file: Path) -> None:
+    """Test oneOf with integer const values generates IntEnum."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "oneof_const_enum_int.yaml",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="oneof_const_enum_int.py",
+    )
+
+
+def test_main_jsonschema_oneof_const_enum_type_list(output_file: Path) -> None:
+    """Test oneOf with const values and type list (nullable)."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "oneof_const_enum_type_list.yaml",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="oneof_const_enum_type_list.py",
+    )
+
+
+def test_main_jsonschema_oneof_const_enum_literal(output_file: Path) -> None:
+    """Test oneOf with const values as Literal type."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "oneof_const_enum.yaml",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="oneof_const_enum_literal.py",
+        extra_args=["--enum-field-as-literal", "all"],
+    )
+
+
+def test_main_jsonschema_oneof_const_enum_infer_type(output_file: Path) -> None:
+    """Test oneOf with const values and inferred type."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "oneof_const_enum_infer_type.yaml",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="oneof_const_enum_infer_type.py",
+    )
+
+
+def test_main_jsonschema_oneof_const_enum_bool(output_file: Path) -> None:
+    """Test oneOf with boolean const values."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "oneof_const_enum_bool.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="oneof_const_enum_bool.py",
+    )
+
+
+def test_main_jsonschema_oneof_const_enum_float(output_file: Path) -> None:
+    """Test oneOf with float const values."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "oneof_const_enum_float.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="oneof_const_enum_float.py",
+    )
+
+
+def test_main_jsonschema_anyof_const_enum_nested(output_file: Path) -> None:
+    """Test nested anyOf with const values in properties and array items."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "anyof_const_enum_nested.yaml",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="anyof_const_enum_nested.py",
+    )
+
+
+def test_main_jsonschema_anyof_const_enum_nested_literal(output_file: Path) -> None:
+    """Test nested anyOf const with --enum-field-as-literal all."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "anyof_const_enum_nested.yaml",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="anyof_const_enum_nested_literal.py",
+        extra_args=["--enum-field-as-literal", "all"],
+    )
+
+
+def test_main_jsonschema_oneof_const_mixed_with_ref(output_file: Path) -> None:
+    """Test oneOf with mixed const and $ref falls back to Union."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "oneof_const_mixed_with_ref.yaml",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="oneof_const_mixed_with_ref.py",
+    )
+
+
+def test_main_jsonschema_oneof_const_with_properties(output_file: Path) -> None:
+    """Test oneOf with const and properties falls back to Union."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "oneof_const_with_properties.yaml",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="oneof_const_with_properties.py",
+    )
+
+
+def test_main_jsonschema_oneof_const_enum_type_list_no_null(output_file: Path) -> None:
+    """Test oneOf const with type list without null."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "oneof_const_enum_type_list_no_null.yaml",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="oneof_const_enum_type_list_no_null.py",
+    )
+
+
+def test_main_jsonschema_oneof_const_enum_object(output_file: Path) -> None:
+    """Test oneOf with object const values for type inference coverage."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "oneof_const_enum_object.yaml",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="oneof_const_enum_object.py",
+    )
+
+
+def test_main_jsonschema_json_pointer_array(output_file: Path) -> None:
+    """Test JSON pointer with arrays."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "json_pointer_array.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="json_pointer_array.py",
+    )
+
+
+@pytest.mark.filterwarnings("error")
+def test_main_disable_warnings_config(capsys: pytest.CaptureFixture[str], output_file: Path) -> None:
+    """Test disable warnings configuration."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "person.json",
+        output_path=output_file,
+        capsys=capsys,
+        assert_no_stderr=True,
+        input_file_type="jsonschema",
+        extra_args=[
+            "--use-union-operator",
+            "--target-python-version",
+            f"3.{MIN_VERSION}",
+            "--disable-warnings",
+        ],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--disable-warnings"],
+    input_schema="jsonschema/all_of_with_object.json",
+    cli_args=["--disable-warnings"],
+    golden_output="main/jsonschema/all_of_with_object.py",
+)
+@pytest.mark.filterwarnings("error")
+def test_main_disable_warnings(capsys: pytest.CaptureFixture[str], output_file: Path) -> None:
+    """Suppress warning messages during code generation.
+
+    The --disable-warnings option silences all warning messages that the generator
+    might emit during processing (e.g., about unsupported features, ambiguous schemas,
+    or potential issues). Useful for clean output in CI/CD pipelines.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "all_of_with_object.json",
+        output_path=output_file,
+        capsys=capsys,
+        assert_no_stderr=True,
+        input_file_type="jsonschema",
+        extra_args=["--disable-warnings"],
+    )
+
+
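+# A hypothetical invocation (paths invented for illustration):
+#
+#     datamodel-codegen --input schema.json --input-file-type jsonschema \
+#         --output model.py --disable-warnings
+
+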
+def test_main_jsonschema_pattern_properties_by_reference(output_file: Path) -> None:
+    """Test pattern properties by reference."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "pattern_properties_by_reference.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="pattern_properties_by_reference.py",
+    )
+
+
+def test_main_dataclass_field(output_file: Path) -> None:
+    """Test dataclass field generation."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "user.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        extra_args=["--output-model-type", "dataclasses.dataclass"],
+    )
+
+
+@pytest.mark.skipif(
+    not is_supported_in_black(PythonVersion.PY_312),
+    reason="Black does not support Python 3.12",
+)
+def test_main_dataclass_field_py312(output_file: Path) -> None:
+    """Test dataclass field generation with Python 3.12 type statement."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "user.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        extra_args=[
+            "--output-model-type",
+            "dataclasses.dataclass",
+            "--target-python-version",
+            "3.12",
+        ],
+    )
+
+
+def test_main_jsonschema_enum_root_literal(output_file: Path) -> None:
+    """Test enum root with literal type."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "enum_in_root" / "enum_in_root.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="root_in_enum.py",
+        extra_args=[
+            "--use-schema-description",
+            "--use-title-as-name",
+            "--field-constraints",
+            "--target-python-version",
+            "3.9",
+            "--allow-population-by-field-name",
+            "--strip-default-none",
+            "--use-default",
+            "--enum-field-as-literal",
+            "all",
+            "--snake-case-field",
+            "--collapse-root-models",
+        ],
+    )
+
+
+def test_main_nullable_any_of(output_file: Path) -> None:
+    """Test nullable anyOf schemas."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "nullable_any_of.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        extra_args=["--field-constraints"],
+    )
+
+
+def test_main_nullable_any_of_use_union_operator(output_file: Path) -> None:
+    """Test nullable anyOf with union operator."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "nullable_any_of.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=["--field-constraints", "--use-union-operator"],
+    )
+
+
+def test_main_nested_all_of(output_file: Path) -> None:
+    """Test nested allOf schemas."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "nested_all_of.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+    )
+
+
+def test_main_all_of_any_of(output_dir: Path) -> None:
+    """Test combination of allOf and anyOf."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "all_of_any_of",
+        output_path=output_dir,
+        expected_directory=EXPECTED_JSON_SCHEMA_PATH / "all_of_any_of",
+        input_file_type="jsonschema",
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--use-double-quotes"],
+    input_schema="jsonschema/all_of_any_of_base_class_ref.json",
+    cli_args=["--use-double-quotes"],
+    golden_output="main/jsonschema/all_of_any_of_base_class_ref.py",
+)
+def test_main_all_of_any_of_base_class_ref(output_file: Path) -> None:
+    """Use double quotes for string literals in generated code.
+
+    The --use-double-quotes option formats all string literals in the generated
+    Python code with double quotes instead of the default single quotes. This
+    helps maintain consistency with codebases that prefer double-quote formatting.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "all_of_any_of_base_class_ref.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        extra_args=["--snake-case-field", "--use-double-quotes", "--reuse-model"],
+    )
+
+
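+# A hypothetical sketch (field invented): string literals switch quoting
+# style, e.g.
+#
+#     status: str = "unknown"
+
+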
+def test_main_all_of_one_of(output_dir: Path) -> None:
+    """Test combination of allOf and oneOf."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "all_of_one_of",
+        output_path=output_dir,
+        expected_directory=EXPECTED_JSON_SCHEMA_PATH / "all_of_one_of",
+        input_file_type="jsonschema",
+    )
+
+
+def test_main_null(output_file: Path) -> None:
+    """Test null type handling."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "null.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+    )
+
+
+@pytest.mark.skipif(
+    version.parse(black.__version__) < version.parse("23.3.0"),
+    reason="Require Black version 23.3.0 or later ",
+)
+def test_main_typed_dict_special_field_name_with_inheritance_model(output_file: Path) -> None:
+    """Test TypedDict special field names with inheritance."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "special_field_name_with_inheritance_model.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=["--output-model-type", "typing.TypedDict", "--target-python-version", "3.11"],
+    )
+
+
+@pytest.mark.skipif(
+    version.parse(black.__version__) < version.parse("23.3.0"),
+    reason="Require Black version 23.3.0 or later ",
+)
+def test_main_typed_dict_not_required_nullable(output_file: Path) -> None:
+    """Test main function writing to TypedDict, with combos of Optional/NotRequired."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "not_required_nullable.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=["--output-model-type", "typing.TypedDict", "--target-python-version", "3.11"],
+    )
+
+
+def test_main_typed_dict_const(output_file: Path) -> None:
+    """Test main function writing to TypedDict with const fields."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "const.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=["--output-model-type", "typing.TypedDict", "--target-python-version", "3.10"],
+    )
+
+
+@pytest.mark.skipif(
+    int(black.__version__.split(".")[0]) < 24,
+    reason="Installed black doesn't support the new style",
+)
+def test_main_typed_dict_additional_properties(output_file: Path) -> None:
+    """Test main function writing to TypedDict with additional properties, and no other fields."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "string_dict.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="typed_dict_with_only_additional_properties.py",
+        extra_args=["--output-model-type", "typing.TypedDict", "--target-python-version", "3.11"],
+    )
+
+
+def test_main_dataclass_const(output_file: Path) -> None:
+    """Test main function writing to dataclass with const fields."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "const.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=["--output-model-type", "dataclasses.dataclass", "--target-python-version", "3.10"],
+    )
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic_v2.BaseModel",
+            "discriminator_literals.py",
+        ),
+        (
+            "msgspec.Struct",
+            "discriminator_literals_msgspec.py",
+        ),
+    ],
+)
+@pytest.mark.skipif(
+    int(black.__version__.split(".")[0]) < 24,
+    reason="Installed black doesn't support the new style",
+)
+def test_main_jsonschema_discriminator_literals(
+    output_model: str, expected_output: str, min_version: str, output_file: Path
+) -> None:
+    """Test discriminator with literal types."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "discriminator_literals.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file=expected_output,
+        extra_args=["--output-model-type", output_model, "--target-python", min_version],
+    )
+
+
+@pytest.mark.skipif(
+    int(black.__version__.split(".")[0]) < 24,
+    reason="Installed black doesn't support the new style",
+)
+def test_main_jsonschema_discriminator_literals_with_no_mapping(min_version: str, output_file: Path) -> None:
+    """Test discriminator literals without mapping."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "discriminator_no_mapping.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="discriminator_no_mapping.py",
+        extra_args=["--output-model-type", "pydantic_v2.BaseModel", "--target-python", min_version],
+    )
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic_v2.BaseModel",
+            "discriminator_with_external_reference.py",
+        ),
+        pytest.param(
+            "msgspec.Struct",
+            "discriminator_with_external_reference_msgspec.py",
+            marks=MSGSPEC_LEGACY_BLACK_SKIP,
+        ),
+    ],
+)
+def test_main_jsonschema_external_discriminator(
+    output_model: str, expected_output: str, min_version: str, output_file: Path
+) -> None:
+    """Test external discriminator references."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "discriminator_with_external_reference" / "inner_folder" / "schema.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file=expected_output,
+        extra_args=["--output-model-type", output_model, "--target-python", min_version],
+    )
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "discriminator_with_external_references_folder",
+        ),
+        pytest.param(
+            "msgspec.Struct",
+            "discriminator_with_external_references_folder_msgspec",
+            marks=MSGSPEC_LEGACY_BLACK_SKIP,
+        ),
+    ],
+)
+def test_main_jsonschema_external_discriminator_folder(
+    output_model: str, expected_output: str, min_version: str, output_dir: Path
+) -> None:
+    """Test external discriminator in folder structure."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "discriminator_with_external_reference",
+        output_path=output_dir,
+        expected_directory=EXPECTED_JSON_SCHEMA_PATH / expected_output,
+        extra_args=[
+            "--output-model-type",
+            output_model,
+            "--target-python",
+            min_version,
+        ],
+    )
+
+
+def test_main_duplicate_field_constraints(output_dir: Path) -> None:
+    """Test duplicate field constraint handling."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "duplicate_field_constraints",
+        output_path=output_dir,
+        expected_directory=EXPECTED_JSON_SCHEMA_PATH / "duplicate_field_constraints",
+        input_file_type="jsonschema",
+        extra_args=[
+            "--collapse-root-models",
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+        ],
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_duplicate_field_constraints_msgspec(min_version: str, output_dir: Path) -> None:
+    """Test duplicate field constraints with msgspec."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "duplicate_field_constraints",
+        output_path=output_dir,
+        expected_directory=EXPECTED_JSON_SCHEMA_PATH / "duplicate_field_constraints_msgspec",
+        input_file_type="jsonschema",
+        extra_args=[
+            "--output-model-type",
+            "msgspec.Struct",
+            "--target-python-version",
+            min_version,
+        ],
+    )
+
+
+def test_main_dataclass_field_defs(output_file: Path) -> None:
+    """Test dataclass field definitions."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "user_defs.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="dataclass_field.py",
+        extra_args=["--output-model-type", "dataclasses.dataclass"],
+        transform=lambda s: s.replace("filename:  user_defs.json", "filename:  user.json"),
+    )
+
+
+def test_main_dataclass_default(output_file: Path) -> None:
+    """Test dataclass default values."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "user_default.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="dataclass_field_default.py",
+        extra_args=["--output-model-type", "dataclasses.dataclass"],
+    )
+
+
+def test_main_all_of_ref_self(output_file: Path) -> None:
+    """Test allOf with self-reference."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "all_of_ref_self.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_array_field_constraints(output_file: Path) -> None:
+    """Test array field constraints."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "array_field_constraints.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        extra_args=["--field-constraints"],
+    )
+
+
+def test_all_of_use_default(output_file: Path) -> None:
+    """Test allOf with use-default option."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "all_of_default.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=["--use-default"],
+    )
+
+
+def test_main_root_one_of(output_dir: Path) -> None:
+    """Test root-level oneOf schemas."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "root_one_of",
+        output_path=output_dir,
+        expected_directory=EXPECTED_JSON_SCHEMA_PATH / "root_one_of",
+        input_file_type="jsonschema",
+    )
+
+
+def test_one_of_with_sub_schema_array_item(output_file: Path) -> None:
+    """Test oneOf with sub-schema array items."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "one_of_with_sub_schema_array_item.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        extra_args=["--output-model-type", "pydantic_v2.BaseModel"],
+    )
+
+
+def test_main_jsonschema_with_custom_formatters(output_file: Path, tmp_path: Path) -> None:
+    """Test custom formatter integration."""
+    formatter_config = {
+        "license_file": str(Path(__file__).parent.parent.parent / "data/python/custom_formatters/license_example.txt")
+    }
+    formatter_config_path = tmp_path / "formatter_config"
+    formatter_config_path.write_text(json.dumps(formatter_config))
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "person.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="custom_formatters.py",
+        extra_args=[
+            "--custom-formatters",
+            "tests.data.python.custom_formatters.add_license",
+            "--custom-formatters-kwargs",
+            str(formatter_config_path),
+        ],
+    )
+
+
+def test_main_imports_correct(output_dir: Path) -> None:
+    """Test correct import generation."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "imports_correct",
+        output_path=output_dir,
+        expected_directory=EXPECTED_JSON_SCHEMA_PATH / "imports_correct",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+        ],
+    )
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic_v2.BaseModel",
+            "duration_pydantic_v2.py",
+        ),
+        (
+            "msgspec.Struct",
+            "duration_msgspec.py",
+        ),
+    ],
+)
+def test_main_jsonschema_duration(output_model: str, expected_output: str, min_version: str, output_file: Path) -> None:
+    """Test duration type handling."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "duration.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file=expected_output,
+        extra_args=["--output-model-type", output_model, "--target-python", min_version],
+    )
+
+
+@pytest.mark.skipif(
+    int(black.__version__.split(".")[0]) < 24,
+    reason="Installed black doesn't support the new style",
+)
+def test_main_jsonschema_keyword_only_msgspec(min_version: str, output_file: Path) -> None:
+    """Test msgspec keyword-only arguments."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "discriminator_literals.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="discriminator_literals_msgspec_keyword_only.py",
+        extra_args=[
+            "--output-model-type",
+            "msgspec.Struct",
+            "--keyword-only",
+            "--target-python-version",
+            min_version,
+        ],
+    )
+
+
+@pytest.mark.skipif(
+    int(black.__version__.split(".")[0]) < 24,
+    reason="Installed black doesn't support the new style",
+)
+def test_main_jsonschema_keyword_only_msgspec_with_extra_data(min_version: str, output_file: Path) -> None:
+    """Test msgspec keyword-only with extra data."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "discriminator_literals.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="discriminator_literals_msgspec_keyword_only_omit_defaults.py",
+        extra_args=[
+            "--output-model-type",
+            "msgspec.Struct",
+            "--keyword-only",
+            "--target-python-version",
+            min_version,
+            "--extra-template-data",
+            str(JSON_SCHEMA_DATA_PATH / "extra_data_msgspec.json"),
+        ],
+    )
+
+
+@pytest.mark.skipif(
+    int(black.__version__.split(".")[0]) < 24,
+    reason="Installed black doesn't support the new style",
+)
+def test_main_jsonschema_openapi_keyword_only_msgspec_with_extra_data(tmp_path: Path) -> None:
+    """Test OpenAPI msgspec keyword-only with extra data."""
+    extra_data = json.loads((JSON_SCHEMA_DATA_PATH / "extra_data_msgspec.json").read_text())
+    output_file: Path = tmp_path / "output.py"
+    generate(
+        input_=JSON_SCHEMA_DATA_PATH / "discriminator_literals.json",
+        output=output_file,
+        input_file_type=InputFileType.JsonSchema,
+        output_model_type=DataModelType.MsgspecStruct,
+        keyword_only=True,
+        target_python_version=PythonVersionMin,
+        extra_template_data=defaultdict(dict, extra_data),
+        # Following values are implied by `msgspec.Struct` in the CLI
+        use_annotated=True,
+        field_constraints=True,
+    )
+    assert_file_content(output_file, "discriminator_literals_msgspec_keyword_only_omit_defaults.py")
+
+
+@MSGSPEC_LEGACY_BLACK_SKIP
+def test_main_msgspec_discriminator_with_type_string(output_file: Path) -> None:
+    """Test msgspec Struct generation with discriminator using type: string + const."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "discriminator_with_type_string.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="discriminator_with_type_string_msgspec.py",
+        extra_args=[
+            "--output-model-type",
+            "msgspec.Struct",
+            "--target-python-version",
+            "3.10",
+        ],
+    )
+
+
+@MSGSPEC_LEGACY_BLACK_SKIP
+def test_main_msgspec_discriminator_with_meta(output_file: Path) -> None:
+    """Test msgspec Struct generation with discriminator ClassVar having Meta constraints."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "discriminator_with_meta_msgspec.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="discriminator_with_meta_msgspec.py",
+        extra_args=[
+            "--output-model-type",
+            "msgspec.Struct",
+            "--target-python-version",
+            "3.10",
+        ],
+    )
+
+
+@MSGSPEC_LEGACY_BLACK_SKIP
+def test_main_msgspec_discriminator_without_annotated(output_file: Path) -> None:
+    """Test msgspec Struct discriminator generates ClassVar even without use_annotated."""
+    generate(
+        JSON_SCHEMA_DATA_PATH / "discriminator_with_type_string.json",
+        output=output_file,
+        output_model_type=DataModelType.MsgspecStruct,
+        target_python_version=PythonVersion.PY_310,
+        use_annotated=False,
+    )
+    assert_file_content(output_file, "discriminator_with_type_string_msgspec_no_annotated.py")
+
+
+@MSGSPEC_LEGACY_BLACK_SKIP
+def test_main_msgspec_null_field(output_file: Path) -> None:
+    """Test msgspec Struct generation with null type fields."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "msgspec_null_field.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        extra_args=[
+            "--output-model-type",
+            "msgspec.Struct",
+            "--use-union-operator",
+            "--target-python-version",
+            "3.10",
+        ],
+    )
+
+
+@MSGSPEC_LEGACY_BLACK_SKIP
+def test_main_msgspec_falsy_defaults(output_file: Path) -> None:
+    """Test msgspec Struct generation preserves falsy default values (0, '', False)."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "msgspec_falsy_defaults.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        extra_args=[
+            "--output-model-type",
+            "msgspec.Struct",
+            "--use-union-operator",
+            "--target-python-version",
+            "3.10",
+        ],
+    )
+
+
+def test_main_invalid_import_name(output_dir: Path) -> None:
+    """Test invalid import name handling."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "invalid_import_name",
+        output_path=output_dir,
+        expected_directory=EXPECTED_JSON_SCHEMA_PATH / "invalid_import_name",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+        ],
+    )
+
+
+def test_main_alias_import_alias(output_dir: Path) -> None:
+    """Ensure imports with aliases are retained after cleanup."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "alias_import_alias",
+        output_path=output_dir,
+        expected_directory=EXPECTED_JSON_SCHEMA_PATH / "alias_import_alias",
+    )
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic_v2.BaseModel",
+            "field_has_same_name_v2.py",
+        ),
+        (
+            "pydantic.BaseModel",
+            "field_has_same_name.py",
+        ),
+    ],
+)
+def test_main_jsonschema_field_has_same_name(output_model: str, expected_output: str, output_file: Path) -> None:
+    """Test field with same name as parent."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "field_has_same_name.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file=expected_output,
+        extra_args=["--output-model-type", output_model],
+    )
+
+
+@pytest.mark.benchmark
+def test_main_jsonschema_required_and_any_of_required(output_file: Path) -> None:
+    """Test required field with anyOf required."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "required_and_any_of_required.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="required_and_any_of_required.py",
+    )
+
+
+def test_main_json_pointer_escaped_segments(tmp_path: Path) -> None:
+    """Test JSON pointer with escaped segments."""
+    schema = {
+        "definitions": {
+            "foo/bar": {"type": "object", "properties": {"value": {"type": "string"}}},
+            "baz~qux": {"type": "object", "properties": {"value": {"type": "integer"}}},
+        },
+        "properties": {
+            "foo_bar": {"$ref": "#/definitions/foo~1bar"},
+            "baz_qux": {"$ref": "#/definitions/baz~0qux"},
+        },
+        "type": "object",
+    }
+    expected = (
+        "# generated by datamodel-codegen:\n"
+        "#   filename: input.json\n"
+        "#   timestamp: 2019-07-26T00:00:00+00:00\n\n"
+        "from __future__ import annotations\n\n"
+        "from typing import Optional\n\n"
+        "from pydantic import BaseModel\n\n"
+        "class FooBar(BaseModel):\n    value: Optional[str] = None\n\n"
+        "class BazQux(BaseModel):\n    value: Optional[int] = None\n\n"
+        "class Baz0qux(BaseModel):\n    value: Optional[int] = None\n\n"
+        "class Foo1bar(BaseModel):\n    value: Optional[str] = None\n\n"
+        "class Model(BaseModel):\n    foo_bar: Optional[Foo1bar] = None\n    baz_qux: Optional[Baz0qux] = None\n"
+    )
+
+    input_file = tmp_path / "input.json"
+    output_file = tmp_path / "output.py"
+    input_file.write_text(json.dumps(schema))
+    run_main_and_assert(
+        input_path=input_file,
+        output_path=output_file,
+        expected_output=expected,
+        input_file_type="jsonschema",
+        ignore_whitespace=True,
+    )
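+
+
+# A minimal sketch of RFC 6901 pointer-token unescaping, assuming the resolver
+# decodes "~1" before "~0" (hypothetical helper, for illustration only; the
+# generator's own resolver is what the test above exercises):
+def _unescape_pointer_token(token: str) -> str:
+    """Decode an RFC 6901 escaped reference token ("~1" -> "/", "~0" -> "~")."""
+    return token.replace("~1", "/").replace("~0", "~")
+
+
+assert _unescape_pointer_token("foo~1bar") == "foo/bar"
+assert _unescape_pointer_token("baz~0qux") == "baz~qux"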
+
+
+def test_main_json_pointer_percent_encoded_segments(tmp_path: Path) -> None:
+    """Test JSON pointer with percent-encoded segments."""
+    schema = {
+        "definitions": {
+            "foo/bar": {"type": "object", "properties": {"value": {"type": "string"}}},
+            "baz~qux": {"type": "object", "properties": {"value": {"type": "integer"}}},
+            "space key": {"type": "object", "properties": {"value": {"type": "boolean"}}},
+        },
+        "properties": {
+            "foo_bar": {"$ref": "#/definitions/foo%2Fbar"},
+            "baz_qux": {"$ref": "#/definitions/baz%7Equx"},
+            "space_key": {"$ref": "#/definitions/space%20key"},
+        },
+        "type": "object",
+    }
+    expected = (
+        "# generated by datamodel-codegen:\n"
+        "#   filename: input.json\n"
+        "#   timestamp: 2019-07-26T00:00:00+00:00\n\n"
+        "from __future__ import annotations\n\n"
+        "from typing import Optional\n\n"
+        "from pydantic import BaseModel\n\n"
+        "class FooBar(BaseModel):\n    value: Optional[str] = None\n\n"
+        "class BazQux(BaseModel):\n    value: Optional[int] = None\n\n"
+        "class SpaceKey(BaseModel):\n    value: Optional[bool] = None\n\n"
+        "class Baz7Equx(BaseModel):\n    value: Optional[int] = None\n\n"
+        "class Foo2Fbar(BaseModel):\n    value: Optional[str] = None\n\n"
+        "class Space20key(BaseModel):\n    value: Optional[bool] = None\n\n"
+        "class Model(BaseModel):\n    foo_bar: Optional[Foo2Fbar] = None\n"
+        "    baz_qux: Optional[Baz7Equx] = None\n"
+        "    space_key: Optional[Space20key] = None\n"
+    )
+
+    input_file = tmp_path / "input.json"
+    output_file = tmp_path / "output.py"
+    input_file.write_text(json.dumps(schema))
+    run_main_and_assert(
+        input_path=input_file,
+        output_path=output_file,
+        expected_output=expected,
+        input_file_type="jsonschema",
+        ignore_whitespace=True,
+    )
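+
+
+# For reference, standard percent-decoding of such tokens (stdlib behavior,
+# not the generator's: as the expected output above shows, the generator keeps
+# the encoded token in the derived class names, e.g. Foo2Fbar):
+from urllib.parse import unquote as _unquote
+
+assert _unquote("foo%2Fbar") == "foo/bar"
+assert _unquote("baz%7Equx") == "baz~qux"
+assert _unquote("space%20key") == "space key"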
+
+
+@pytest.mark.parametrize(
+    ("extra_fields", "output_model", "expected_output"),
+    [
+        (
+            "allow",
+            "pydantic.BaseModel",
+            "extra_fields_allow.py",
+        ),
+        (
+            "forbid",
+            "pydantic.BaseModel",
+            "extra_fields_forbid.py",
+        ),
+        (
+            "ignore",
+            "pydantic.BaseModel",
+            "extra_fields_ignore.py",
+        ),
+        (
+            "allow",
+            "pydantic_v2.BaseModel",
+            "extra_fields_v2_allow.py",
+        ),
+        (
+            "forbid",
+            "pydantic_v2.BaseModel",
+            "extra_fields_v2_forbid.py",
+        ),
+        (
+            "ignore",
+            "pydantic_v2.BaseModel",
+            "extra_fields_v2_ignore.py",
+        ),
+    ],
+)
+def test_main_extra_fields(extra_fields: str, output_model: str, expected_output: str, output_file: Path) -> None:
+    """Test extra fields configuration."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "extra_fields.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file=expected_output,
+        extra_args=["--extra-fields", extra_fields, "--output-model-type", output_model],
+    )
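+
+
+# Illustrative mapping (a sketch, not the golden files verbatim): the
+# --extra-fields value lands in the model config, e.g. pydantic v2
+#     model_config = ConfigDict(extra="forbid")
+# or pydantic v1
+#     class Config:
+#         extra = Extra.forbid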
+
+
+def test_main_jsonschema_same_name_objects(output_file: Path) -> None:
+    """Test objects with same name (see issue #2460)."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "same_name_objects.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="same_name_objects.py",
+    )
+
+
+def test_main_jsonschema_forwarding_reference_collapse_root(output_dir: Path) -> None:
+    """Test forwarding reference with collapsed root (see issue #1466)."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "forwarding_reference",
+        output_path=output_dir,
+        expected_directory=EXPECTED_JSON_SCHEMA_PATH / "forwarding_reference",
+        input_file_type="jsonschema",
+        extra_args=["--collapse-root-models"],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--use-type-alias"],
+    input_schema="jsonschema/type_alias.json",
+    cli_args=["--use-type-alias"],
+    golden_output="jsonschema/type_alias.py",
+)
+def test_main_jsonschema_type_alias(output_file: Path) -> None:
+    """Generate TypeAlias for root models instead of wrapper classes.
+
+    The `--use-type-alias` flag emits type aliases for root models instead of
+    generating single-field wrapper classes.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "type_alias.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="type_alias.py",
+        extra_args=["--use-type-alias"],
+    )
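+
+
+# Illustrative difference (a sketch, not the golden file): a root model such as
+#     {"type": "string"}
+# becomes `Name: TypeAlias = str` with --use-type-alias instead of a wrapper
+# class with a single root field.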
+
+
+@pytest.mark.skipif(
+    int(black.__version__.split(".")[0]) < 23,
+    reason="Installed black doesn't support the new 'type' statement",
+)
+def test_main_jsonschema_type_alias_py312(output_file: Path) -> None:
+    """Test that type statement syntax is generated for Python 3.12+ with Pydantic v2."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "type_alias.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="type_alias_py312.py",
+        extra_args=[
+            "--use-type-alias",
+            "--target-python-version",
+            "3.12",
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+        ],
+    )
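+
+
+# For reference (PEP 695): targeting Python 3.12 switches the alias to the
+# `type` statement, e.g. `type Name = str`, which is why the skip above guards
+# on a black version that can format that syntax.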
+
+
+@pytest.mark.cli_doc(
+    options=["--use-field-description"],
+    input_schema="jsonschema/type_alias.json",
+    cli_args=["--use-type-alias", "--use-field-description"],
+    golden_output="jsonschema/type_alias_with_field_description.py",
+)
+def test_main_jsonschema_type_alias_with_field_description(output_file: Path) -> None:
+    """Include schema descriptions as Field docstrings.
+
+    The `--use-field-description` flag extracts each field's `description`
+    property from the schema and includes it as a docstring or Field
+    description in the generated models, preserving the documentation from
+    the original schema.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "type_alias.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="type_alias_with_field_description.py",
+        extra_args=["--use-type-alias", "--use-field-description"],
+    )
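+
+
+# Illustrative effect (a sketch): a schema field like
+#     {"type": "string", "description": "The user's name"}
+# carries its description into the generated model, e.g. as a docstring or
+# Field(..., description="The user's name").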
+
+
+@pytest.mark.skipif(
+    int(black.__version__.split(".")[0]) < 23,
+    reason="Installed black doesn't support the new 'type' statement",
+)
+def test_main_jsonschema_type_alias_with_field_description_py312(output_file: Path) -> None:
+    """Test that type statement syntax is generated with field descriptions for Python 3.12+ and Pydantic v2."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "type_alias.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="type_alias_with_field_description_py312.py",
+        extra_args=[
+            "--use-type-alias",
+            "--use-field-description",
+            "--target-python-version",
+            "3.12",
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+        ],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--type-mappings"],
+    input_schema="jsonschema/type_mappings.json",
+    cli_args=["--output-model-type", "pydantic_v2.BaseModel", "--type-mappings", "binary=string"],
+    golden_output="jsonschema/type_mappings.py",
+)
+def test_main_jsonschema_type_mappings(output_file: Path) -> None:
+    """Override default type mappings for schema formats.
+
+    The `--type-mappings` flag overrides the Python type emitted for a schema
+    format, using `format=type` or `type+format=type` entries (here
+    `binary=string`).
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "type_mappings.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="type_mappings.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--type-mappings",
+            "binary=string",
+        ],
+    )
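+
+
+# A minimal sketch of the accepted --type-mappings syntax ("format=type" or
+# "type+format=type"); hypothetical parser for illustration, not the CLI's
+# actual implementation:
+def _parse_type_mapping(entry: str):
+    """Split a mapping entry into (schema_type, schema_format, target_type)."""
+    key, _, target = entry.partition("=")
+    if not target:
+        msg = f"Invalid type mapping format: {entry}"
+        raise ValueError(msg)
+    schema_type, plus, schema_format = key.partition("+")
+    if plus:
+        return schema_type, schema_format, target
+    return None, key, target
+
+
+assert _parse_type_mapping("binary=string") == (None, "binary", "string")
+assert _parse_type_mapping("string+binary=string") == ("string", "binary", "string")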
+
+
+def test_main_jsonschema_type_mappings_with_type_prefix(output_file: Path) -> None:
+    """Test --type-mappings option with type+format syntax."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "type_mappings.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="type_mappings.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--type-mappings",
+            "string+binary=string",
+        ],
+    )
+
+
+def test_main_jsonschema_type_mappings_to_type_default(output_file: Path) -> None:
+    """Test --type-mappings option mapping to a type's default (e.g., binary=integer)."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "type_mappings.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="type_mappings_to_integer.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--type-mappings",
+            "binary=integer",
+        ],
+    )
+
+
+def test_main_jsonschema_type_mappings_to_boolean(output_file: Path) -> None:
+    """Test --type-mappings option mapping to a top-level type (e.g., binary=boolean)."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "type_mappings.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="type_mappings_to_boolean.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--type-mappings",
+            "binary=boolean",
+        ],
+    )
+
+
+def test_main_jsonschema_type_mappings_invalid_format(output_file: Path) -> None:
+    """Test --type-mappings option with invalid format raises error."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "type_mappings.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        expected_exit=Exit.ERROR,
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--type-mappings",
+            "invalid_without_equals",
+        ],
+        expected_stderr_contains="Invalid type mapping format",
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--reuse-scope"],
+    input_schema="jsonschema/reuse_scope_tree",
+    cli_args=["--reuse-model", "--reuse-scope", "tree"],
+    golden_output="jsonschema/reuse_scope_tree",
+)
+def test_main_jsonschema_reuse_scope_tree(output_dir: Path) -> None:
+    """Scope for model reuse detection (root or tree).
+
+    The `--reuse-scope` flag selects where duplicates are detected for
+    `--reuse-model`: within a single root module (`root`) or across the
+    whole generated tree (`tree`).
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "reuse_scope_tree",
+        output_path=output_dir,
+        expected_directory=EXPECTED_JSON_SCHEMA_PATH / "reuse_scope_tree",
+        input_file_type="jsonschema",
+        extra_args=["--reuse-model", "--reuse-scope", "tree"],
+    )
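+
+
+# A hedged sketch of tree-scope deduplication (hypothetical helper, not the
+# generator's implementation): the first module to define a structural
+# signature becomes canonical; later duplicates reference it instead.
+def _canonical_models(models):
+    """models: iterable of (module, name, signature) tuples."""
+    canonical = {}
+    for module, name, signature in models:
+        canonical.setdefault(signature, (module, name))
+    return canonical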
+
+
+def test_main_jsonschema_reuse_scope_tree_enum(output_dir: Path) -> None:
+    """Test --reuse-scope=tree to deduplicate enum models across multiple files."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "reuse_scope_tree_enum",
+        output_path=output_dir,
+        expected_directory=EXPECTED_JSON_SCHEMA_PATH / "reuse_scope_tree_enum",
+        input_file_type="jsonschema",
+        extra_args=["--reuse-model", "--reuse-scope", "tree"],
+    )
+
+
+def test_main_jsonschema_reuse_scope_tree_warning(capsys: pytest.CaptureFixture[str], output_dir: Path) -> None:
+    """Test warning when --reuse-scope=tree is used without --reuse-model."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "reuse_scope_tree",
+        output_path=output_dir,
+        input_file_type="jsonschema",
+        extra_args=["--reuse-scope", "tree"],
+        capsys=capsys,
+        expected_stderr_contains="Warning: --reuse-scope=tree has no effect without --reuse-model",
+    )
+
+
+def test_main_jsonschema_reuse_scope_tree_no_dup(output_dir: Path) -> None:
+    """Test --reuse-scope=tree when there are no duplicate models."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "reuse_scope_tree_no_dup",
+        output_path=output_dir,
+        expected_directory=EXPECTED_JSON_SCHEMA_PATH / "reuse_scope_tree_no_dup",
+        input_file_type="jsonschema",
+        extra_args=["--reuse-model", "--reuse-scope", "tree"],
+    )
+
+
+def test_main_jsonschema_reuse_scope_tree_self_ref(output_dir: Path) -> None:
+    """Test --reuse-scope=tree with self-referencing models."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "reuse_scope_tree_self_ref",
+        output_path=output_dir,
+        expected_directory=EXPECTED_JSON_SCHEMA_PATH / "reuse_scope_tree_self_ref",
+        input_file_type="jsonschema",
+        extra_args=["--reuse-model", "--reuse-scope", "tree"],
+    )
+
+
+def test_main_jsonschema_reuse_scope_tree_conflict(capsys: pytest.CaptureFixture[str], output_dir: Path) -> None:
+    """Test --reuse-scope=tree error when schema file name conflicts with shared module."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "reuse_scope_tree_conflict",
+        output_path=output_dir,
+        input_file_type="jsonschema",
+        extra_args=["--reuse-model", "--reuse-scope", "tree"],
+        expected_exit=Exit.ERROR,
+        capsys=capsys,
+        expected_stderr_contains="Schema file or directory 'shared' conflicts with the shared module name",
+    )
+
+
+def test_main_jsonschema_reuse_scope_tree_conflict_dir(capsys: pytest.CaptureFixture[str], output_dir: Path) -> None:
+    """Test --reuse-scope=tree error when schema directory name conflicts with shared module."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "reuse_scope_tree_conflict_dir",
+        output_path=output_dir,
+        input_file_type="jsonschema",
+        extra_args=["--reuse-model", "--reuse-scope", "tree"],
+        expected_exit=Exit.ERROR,
+        capsys=capsys,
+        expected_stderr_contains="Schema file or directory 'shared' conflicts with the shared module name",
+    )
+
+
+def test_main_jsonschema_reuse_scope_tree_no_conflict_dir(output_dir: Path) -> None:
+    """Test --reuse-scope=tree does not error when shared/ dir exists but no duplicates."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "reuse_scope_tree_no_conflict_dir",
+        output_path=output_dir,
+        expected_directory=EXPECTED_JSON_SCHEMA_PATH / "reuse_scope_tree_no_conflict_dir",
+        input_file_type="jsonschema",
+        extra_args=["--reuse-model", "--reuse-scope", "tree"],
+    )
+
+
+def test_main_jsonschema_reuse_scope_tree_multi(output_dir: Path) -> None:
+    """Test --reuse-scope=tree with multiple files where canonical is not in first module."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "reuse_scope_tree_multi",
+        output_path=output_dir,
+        expected_directory=EXPECTED_JSON_SCHEMA_PATH / "reuse_scope_tree_multi",
+        input_file_type="jsonschema",
+        extra_args=["--reuse-model", "--reuse-scope", "tree"],
+    )
+
+
+def test_main_jsonschema_reuse_scope_tree_branch(output_dir: Path) -> None:
+    """Test --reuse-scope=tree branch coverage with duplicate in later modules."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "reuse_scope_tree_branch",
+        output_path=output_dir,
+        expected_directory=EXPECTED_JSON_SCHEMA_PATH / "reuse_scope_tree_branch",
+        input_file_type="jsonschema",
+        extra_args=["--reuse-model", "--reuse-scope", "tree"],
+    )
+
+
+def test_main_jsonschema_reuse_scope_tree_dataclass(output_dir: Path) -> None:
+    """Test --reuse-scope=tree with dataclasses output type (supports inheritance)."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "reuse_scope_tree_dataclass",
+        output_path=output_dir,
+        expected_directory=EXPECTED_JSON_SCHEMA_PATH / "reuse_scope_tree_dataclass",
+        input_file_type="jsonschema",
+        extra_args=["--reuse-model", "--reuse-scope", "tree", "--output-model-type", "dataclasses.dataclass"],
+    )
+
+
+def test_main_jsonschema_reuse_scope_tree_typeddict(output_dir: Path) -> None:
+    """Test --reuse-scope=tree with TypedDict output type (no inheritance, direct reference)."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "reuse_scope_tree_typeddict",
+        output_path=output_dir,
+        expected_directory=EXPECTED_JSON_SCHEMA_PATH / "reuse_scope_tree_typeddict",
+        input_file_type="jsonschema",
+        extra_args=["--reuse-model", "--reuse-scope", "tree", "--output-model-type", "typing.TypedDict"],
+    )
+
+
+def test_main_jsonschema_empty_items_array(output_file: Path) -> None:
+    """Test that arrays with empty items ({}) generate List[Any] instead of bare List."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "empty_items_array.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--aliases"],
+    input_schema="jsonschema/hierarchical_aliases.json",
+    cli_args=["--aliases", "aliases/hierarchical_aliases_scoped.json"],
+    golden_output="jsonschema/jsonschema_hierarchical_aliases_scoped.py",
+)
+def test_main_jsonschema_hierarchical_aliases_scoped(output_file: Path) -> None:
+    """Test hierarchical aliases with scoped format (ClassName.field)."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "hierarchical_aliases.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        extra_args=[
+            "--aliases",
+            str(ALIASES_DATA_PATH / "hierarchical_aliases_scoped.json"),
+        ],
+    )
+
+
+def test_main_jsonschema_multiple_types_with_object(output_file: Path) -> None:
+    """Test multiple types in array including object with properties generates Union type."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "multiple_types_with_object.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+    )
+
+
+@MSGSPEC_LEGACY_BLACK_SKIP
+def test_main_jsonschema_type_alias_with_circular_ref_to_class_msgspec(min_version: str, output_file: Path) -> None:
+    """Test TypeAlias with circular reference to class generates quoted forward refs."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "type_alias_with_circular_ref_to_class.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="type_alias_with_circular_ref_to_class_msgspec.py",
+        extra_args=[
+            "--output-model-type",
+            "msgspec.Struct",
+            "--target-python-version",
+            min_version,
+        ],
+    )
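+
+
+# Illustrative forward-reference shape (a sketch, not the golden file): when
+# the alias points at a Struct defined later in the module, the name is
+# quoted, e.g.
+#     Node = Union["Leaf", int]
+# so it resolves once the class exists.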
+
+
+def test_main_jsonschema_enum_object_values(output_file: Path) -> None:
+    """Test that enum with object values uses title/name/const for member names (issue #1620)."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "enum_object_values.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+    )
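+
+
+# Illustrative member naming (a sketch, per the docstring above): an entry like
+#     {"title": "First", "const": {"id": 1}}
+# yields an enum member named from the title rather than from the object value.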
+
+
+@pytest.mark.cli_doc(
+    options=["--collapse-root-models"],
+    input_schema="jsonschema/collapse_root_models_empty_union.json",
+    cli_args=["--collapse-root-models"],
+    golden_output="main/jsonschema/jsonschema_collapse_root_models_empty_union.py",
+)
+def test_main_jsonschema_collapse_root_models_empty_union(output_file: Path) -> None:
+    """Inline root model definitions instead of creating separate wrapper classes.
+
+    The --collapse-root-models option simplifies generated code by collapsing
+    root-level models (top-level type aliases) directly into their usage sites.
+    This eliminates unnecessary wrapper classes and produces more concise output,
+    especially useful when schemas define simple root types or type aliases.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "collapse_root_models_empty_union.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        extra_args=["--collapse-root-models"],
+    )
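+
+
+# Illustrative collapse (a sketch, not the golden file):
+# without the flag:
+#     class Values(BaseModel):
+#         __root__: List[str]
+#     class Model(BaseModel):
+#         values: Optional[Values] = None
+# with --collapse-root-models the wrapper is inlined:
+#     class Model(BaseModel):
+#         values: Optional[List[str]] = None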
+
+
+def test_main_jsonschema_collapse_root_models_with_optional(output_file: Path) -> None:
+    """Test that collapse-root-models correctly preserves Optional import when needed."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "collapse_root_models_with_optional.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        extra_args=["--collapse-root-models"],
+    )
+
+
+def test_main_jsonschema_collapse_root_models_nested_reference(output_file: Path) -> None:
+    """Ensure nested references inside root models still get imported when collapsing."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "collapse_root_models_nested_reference.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        extra_args=["--collapse-root-models"],
+    )
+
+
+def test_main_jsonschema_file_url_ref(tmp_path: Path) -> None:
+    """Test that file:// URL $ref is resolved correctly."""
+    pet_schema = {
+        "$schema": "https://json-schema.org/draft/2020-12/schema",
+        "type": "object",
+        "properties": {"name": {"type": "string"}, "age": {"type": "integer"}},
+        "required": ["name"],
+    }
+    pet_file = tmp_path / "pet.json"
+    pet_file.write_text(json.dumps(pet_schema))
+
+    main_schema = {
+        "$schema": "https://json-schema.org/draft/2020-12/schema",
+        "type": "object",
+        "properties": {"pet": {"$ref": pet_file.as_uri()}},
+    }
+    main_file = tmp_path / "main.json"
+    main_file.write_text(json.dumps(main_schema))
+
+    expected = (
+        "# generated by datamodel-codegen:\n"
+        "#   filename:  main.json\n\n"
+        "from __future__ import annotations\n\n"
+        "from typing import Optional\n\n"
+        "from pydantic import BaseModel\n\n\n"
+        "class Pet(BaseModel):\n"
+        "    name: str\n"
+        "    age: Optional[int] = None\n\n\n"
+        "class Model(BaseModel):\n"
+        "    pet: Optional[Pet] = None\n"
+    )
+    run_main_and_assert(
+        input_path=main_file,
+        output_path=tmp_path / "output.py",
+        input_file_type="jsonschema",
+        expected_output=expected,
+        ignore_whitespace=True,
+        extra_args=["--disable-timestamp"],
+    )
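+
+
+# A POSIX-only sketch of mapping a file:// $ref back to a local path
+# (assumption: the generator does roughly this; Windows drive letters differ):
+def _file_uri_to_path(uri: str) -> str:
+    from urllib.parse import unquote, urlparse
+
+    return unquote(urlparse(uri).path)
+
+
+# e.g. _file_uri_to_path("file:///tmp/pet.json") == "/tmp/pet.json"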
+
+
+def test_main_jsonschema_file_url_ref_percent_encoded(tmp_path: Path) -> None:
+    """Test that file:// URL with percent-encoded path is resolved correctly."""
+    dir_with_space = tmp_path / "my schemas"
+    dir_with_space.mkdir()
+
+    pet_schema = {
+        "$schema": "https://json-schema.org/draft/2020-12/schema",
+        "type": "object",
+        "properties": {"name": {"type": "string"}},
+    }
+    pet_file = dir_with_space / "pet.json"
+    pet_file.write_text(json.dumps(pet_schema))
+
+    main_schema = {
+        "$schema": "https://json-schema.org/draft/2020-12/schema",
+        "type": "object",
+        "properties": {"pet": {"$ref": pet_file.as_uri()}},
+    }
+    main_file = tmp_path / "main.json"
+    main_file.write_text(json.dumps(main_schema))
+
+    expected = (
+        "# generated by datamodel-codegen:\n"
+        "#   filename:  main.json\n\n"
+        "from __future__ import annotations\n\n"
+        "from typing import Optional\n\n"
+        "from pydantic import BaseModel\n\n\n"
+        "class Pet(BaseModel):\n"
+        "    name: Optional[str] = None\n\n\n"
+        "class Model(BaseModel):\n"
+        "    pet: Optional[Pet] = None\n"
+    )
+    run_main_and_assert(
+        input_path=main_file,
+        output_path=tmp_path / "output.py",
+        input_file_type="jsonschema",
+        expected_output=expected,
+        ignore_whitespace=True,
+        extra_args=["--disable-timestamp"],
+    )
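+
+
+# Note: Path.as_uri() percent-encodes unsafe characters, so the "my schemas"
+# directory above appears as ".../my%20schemas/pet.json" in the $ref; the test
+# verifies that this round-trips back to the file on disk.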
+
+
+@pytest.mark.benchmark
+def test_main_jsonschema_root_model_default_value(output_file: Path) -> None:
+    """Test RootModel default values are wrapped with type constructors."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "root_model_default_value.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="root_model_default_value.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--use-annotated",
+            "--set-default-enum-member",
+        ],
+    )
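+
+
+# Illustrative wrapping (a sketch, per the docstring above): a schema default
+# for a RootModel-typed field is emitted through the type's constructor, e.g.
+#     names: Names = Names(["a", "b"])
+# so the default value matches the annotated type.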
+
+
+@pytest.mark.benchmark
+def test_main_jsonschema_root_model_default_value_no_annotated(output_file: Path) -> None:
+    """Test RootModel default values without --use-annotated flag."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "root_model_default_value.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="root_model_default_value_no_annotated.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--set-default-enum-member",
+        ],
+    )
+
+
+@pytest.mark.benchmark
+def test_main_jsonschema_root_model_default_value_branches(output_file: Path) -> None:
+    """Test RootModel default value branches."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "root_model_default_value_branches.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="root_model_default_value_branches.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--use-annotated",
+        ],
+    )
+
+
+@pytest.mark.benchmark
+def test_main_jsonschema_root_model_default_value_non_root(output_file: Path) -> None:
+    """Test that non-RootModel references are not wrapped."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "root_model_default_value_non_root.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="root_model_default_value_non_root.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--use-annotated",
+        ],
+    )
+
+
+@pytest.mark.benchmark
+def test_main_jsonschema_extras_in_oneof(output_file: Path) -> None:
+    """Test that extras are preserved in oneOf/anyOf structures (Issue #2403)."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "extras_in_oneof.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="extras_in_oneof.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--field-include-all-keys",
+        ],
+    )
+
+
+def test_main_jsonschema_ref_with_additional_keywords(output_dir: Path) -> None:
+    """Test that $ref combined with additional keywords merges properties (Issue #2330)."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "ref_with_additional_keywords",
+        output_path=output_dir,
+        expected_directory=EXPECTED_JSON_SCHEMA_PATH / "ref_with_additional_keywords",
+        input_file_type="jsonschema",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+        ],
+    )
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_file"),
+    [
+        ("typing.TypedDict", "reserved_field_name_schema_typed_dict.py"),
+        ("dataclasses.dataclass", "reserved_field_name_schema_dataclass.py"),
+        ("pydantic_v2.BaseModel", "reserved_field_name_schema_pydantic.py"),
+    ],
+)
+@pytest.mark.cli_doc(
+    options=["--output-model-type"],
+    input_schema="jsonschema/reserved_field_name_schema.json",
+    cli_args=["--target-python-version", "3.11"],
+    model_outputs={
+        "typeddict": "main/jsonschema/reserved_field_name_schema_typed_dict.py",
+        "dataclass": "main/jsonschema/reserved_field_name_schema_dataclass.py",
+        "pydantic_v2": "main/jsonschema/reserved_field_name_schema_pydantic.py",
+    },
+)
+@pytest.mark.benchmark
+@LEGACY_BLACK_SKIP
+def test_main_jsonschema_reserved_field_name(output_model: str, expected_file: str, output_file: Path) -> None:
+    """Test reserved field name handling across model types (Issue #1833).
+
+    This demonstrates how the 'schema' field is handled:
+    - TypedDict: not renamed (schema is not reserved)
+    - dataclass: not renamed (schema is not reserved)
+    - Pydantic: renamed to 'schema_' with alias (BaseModel.schema conflicts)
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "reserved_field_name_schema.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file=expected_file,
+        extra_args=[
+            "--output-model-type",
+            output_model,
+            "--target-python-version",
+            "3.11",
+        ],
+    )
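+
+
+# Illustrative pydantic shape for a property literally named "schema" (a
+# sketch; reserved_field_name_schema_pydantic.py is the golden file):
+#     schema_: Optional[str] = Field(None, alias="schema")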
+
+
+@pytest.mark.benchmark
+@LEGACY_BLACK_SKIP
+def test_main_bundled_schema_with_id_local_file(output_file: Path) -> None:
+    """Test bundled schema with $id using local file input (Issue #1798)."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "bundled_schema_with_id.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="bundled_schema_with_id.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+        ],
+    )
+
+
+@pytest.mark.benchmark
+@LEGACY_BLACK_SKIP
+def test_main_bundled_schema_with_id_url(mocker: MockerFixture, output_file: Path) -> None:
+    """Test bundled schema with $id using URL input produces same output as local file."""
+    schema_path = JSON_SCHEMA_DATA_PATH / "bundled_schema_with_id.json"
+
+    mock_response = mocker.Mock()
+    mock_response.text = schema_path.read_text()
+
+    httpx_get_mock = mocker.patch(
+        "httpx.get",
+        return_value=mock_response,
+    )
+
+    run_main_url_and_assert(
+        url="https://cdn.example.com/schemas/bundled_schema_with_id.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="bundled_schema_with_id.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+        ],
+        transform=lambda s: s.replace(
+            "#   filename:  https://cdn.example.com/schemas/bundled_schema_with_id.json",
+            "#   filename:  bundled_schema_with_id.json",
+        ),
+    )
+
+    httpx_get_mock.assert_called_once_with(
+        "https://cdn.example.com/schemas/bundled_schema_with_id.json",
+        headers=None,
+        verify=True,
+        follow_redirects=True,
+        params=None,
+    )
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_file"),
+    [
+        ("pydantic.BaseModel", "use_frozen_field_v1.py"),
+        ("pydantic_v2.BaseModel", "use_frozen_field_v2.py"),
+        ("dataclasses.dataclass", "use_frozen_field_dataclass.py"),
+    ],
+)
+@pytest.mark.cli_doc(
+    options=["--use-frozen-field"],
+    input_schema="jsonschema/use_frozen_field.json",
+    cli_args=["--use-frozen-field"],
+    model_outputs={
+        "pydantic_v1": "main/jsonschema/use_frozen_field_v1.py",
+        "pydantic_v2": "main/jsonschema/use_frozen_field_v2.py",
+        "dataclass": "main/jsonschema/use_frozen_field_dataclass.py",
+    },
+)
+@pytest.mark.benchmark
+@LEGACY_BLACK_SKIP
+def test_main_use_frozen_field(output_model: str, expected_file: str, output_file: Path) -> None:
+    """Generate frozen (immutable) field definitions for readOnly properties.
+
+    The `--use-frozen-field` flag generates frozen field definitions:
+    - Pydantic v1: `Field(allow_mutation=False)`
+    - Pydantic v2: `Field(frozen=True)`
+    - Dataclasses: silently ignored (no frozen fields generated)
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "use_frozen_field.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file=expected_file,
+        extra_args=["--output-model-type", output_model, "--use-frozen-field"],
+    )
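+
+
+# Illustrative field shapes for a readOnly property (a sketch based on the
+# docstring above, not the golden files verbatim):
+#     pydantic v1: id: Optional[int] = Field(None, allow_mutation=False)
+#     pydantic v2: id: Optional[int] = Field(None, frozen=True)
+#     dataclass:   id: Optional[int] = None  # readOnly silently ignored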
+
+
+@pytest.mark.benchmark
+@LEGACY_BLACK_SKIP
+def test_main_use_frozen_field_no_readonly(output_file: Path) -> None:
+    """Test --use-frozen-field with no readOnly fields produces no frozen fields."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "user.json",  # Has no readOnly fields
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="use_frozen_field_no_readonly.py",
+        extra_args=["--output-model-type", "pydantic_v2.BaseModel", "--use-frozen-field"],
+    )
+
+
+@pytest.mark.benchmark
+def test_main_field_name_shadows_class_name(output_file: Path) -> None:
+    """Test field name shadowing class name is renamed with alias for Pydantic v2."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "field_name_shadows_class_name.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+        ],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--allof-merge-mode"],
+    input_schema="jsonschema/allof_root_model_constraints.json",
+    cli_args=["--allof-merge-mode", "constraints"],
+    golden_output="main/jsonschema/allof_root_model_constraints_merge.py",
+    comparison_output="main/jsonschema/allof_root_model_constraints.py",
+)
+@pytest.mark.benchmark
+def test_main_allof_root_model_constraints_merge(output_file: Path) -> None:
+    """Merge constraints from root model references in allOf schemas.
+
+    The `--allof-merge-mode constraints` option merges only constraint properties
+    (minLength, maximum, etc.) from parent schemas referenced in allOf.
+    This ensures child schemas inherit validation constraints while keeping
+    other properties separate.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "allof_root_model_constraints.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="allof_root_model_constraints_merge.py",
+        extra_args=["--allof-merge-mode", "constraints"],
+    )
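+
+
+# A hedged sketch of constraint-only allOf merging (hypothetical helper, not
+# the generator's implementation): only validation keywords flow down from the
+# referenced parent, and the child's own values win.
+_CONSTRAINT_KEYS = frozenset({"minLength", "maxLength", "minimum", "maximum", "pattern"})
+
+
+def _merge_constraints(child: dict, parent: dict) -> dict:
+    merged = dict(child)
+    for key, value in parent.items():
+        if key in _CONSTRAINT_KEYS and key not in merged:
+            merged[key] = value
+    return merged
+
+
+assert _merge_constraints({"maximum": 5}, {"maximum": 10, "title": "Parent"}) == {"maximum": 5}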
+
+
+@pytest.mark.benchmark
+def test_main_allof_root_model_constraints_none(output_file: Path) -> None:
+    """Test allOf with root model reference without merging (issue #1901)."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "allof_root_model_constraints.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="allof_root_model_constraints.py",
+        extra_args=["--allof-merge-mode", "none"],
+    )
diff -pruN 0.26.4-3/tests/main/openapi/__init__.py 0.45.0-1/tests/main/openapi/__init__.py
--- 0.26.4-3/tests/main/openapi/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/main/openapi/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1 @@
+"""OpenAPI integration tests package."""
diff -pruN 0.26.4-3/tests/main/openapi/conftest.py 0.45.0-1/tests/main/openapi/conftest.py
--- 0.26.4-3/tests/main/openapi/conftest.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/main/openapi/conftest.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,8 @@
+"""Shared fixtures for OpenAPI tests."""
+
+from __future__ import annotations
+
+from tests.conftest import create_assert_file_content
+from tests.main.conftest import EXPECTED_OPENAPI_PATH
+
+assert_file_content = create_assert_file_content(EXPECTED_OPENAPI_PATH)
diff -pruN 0.26.4-3/tests/main/openapi/test_main_openapi.py 0.45.0-1/tests/main/openapi/test_main_openapi.py
--- 0.26.4-3/tests/main/openapi/test_main_openapi.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/main/openapi/test_main_openapi.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,4225 @@
+"""Tests for OpenAPI/Swagger input file code generation."""
+
+from __future__ import annotations
+
+import contextlib
+import json
+import platform
+import re
+import warnings
+from collections import defaultdict
+from pathlib import Path
+from typing import TYPE_CHECKING
+from unittest.mock import Mock, call
+
+import black
+import pydantic
+import pytest
+from packaging import version
+
+from datamodel_code_generator import (
+    MIN_VERSION,
+    DataModelType,
+    InputFileType,
+    OpenAPIScope,
+    PythonVersionMin,
+    chdir,
+    generate,
+    get_version,
+    inferred_message,
+)
+from datamodel_code_generator.__main__ import Exit
+from tests.conftest import assert_directory_content, freeze_time
+from tests.main.conftest import (
+    BLACK_PY313_SKIP,
+    BLACK_PY314_SKIP,
+    DATA_PATH,
+    LEGACY_BLACK_SKIP,
+    MSGSPEC_LEGACY_BLACK_SKIP,
+    OPEN_API_DATA_PATH,
+    TIMESTAMP,
+    run_main_and_assert,
+    run_main_url_and_assert,
+)
+from tests.main.openapi.conftest import EXPECTED_OPENAPI_PATH, assert_file_content
+
+if TYPE_CHECKING:
+    from pytest_mock import MockerFixture
+
+
+@pytest.mark.benchmark
+def test_main(output_file: Path) -> None:
+    """Test OpenAPI file code generation."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="general.py",
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_discriminator_enum(output_file: Path) -> None:
+    """Test OpenAPI generation with discriminator enum."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "discriminator_enum.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="discriminator/enum.py",
+        extra_args=["--target-python-version", "3.10", "--output-model-type", "pydantic_v2.BaseModel"],
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+@pytest.mark.cli_doc(
+    options=["--use-enum-values-in-discriminator"],
+    input_schema="openapi/discriminator_enum.yaml",
+    cli_args=["--use-enum-values-in-discriminator", "--output-model-type", "pydantic_v2.BaseModel"],
+    golden_output="openapi/discriminator/enum_use_enum_values.py",
+)
+def test_main_openapi_discriminator_enum_use_enum_values(output_file: Path) -> None:
+    """Use enum values in discriminator mappings for union types.
+
+    The `--use-enum-values-in-discriminator` flag writes the enum values
+    themselves into discriminator mappings instead of references to the enum
+    members.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "discriminator_enum.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="discriminator/enum_use_enum_values.py",
+        extra_args=[
+            "--target-python-version",
+            "3.10",
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--use-enum-values-in-discriminator",
+        ],
+    )
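+
+
+# Illustrative discriminator tag (a sketch, not the golden file): with the
+# flag, the tag is typed by the enum's value,
+#     pet_type: Literal["cat"]
+# instead of pointing at the enum member / model name.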
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_discriminator_enum_use_enum_values_sanitized(output_file: Path) -> None:
+    """Enum values requiring sanitization are rendered as enum members in discriminator."""
+    with freeze_time(TIMESTAMP):
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "discriminator_enum_sanitized.yaml",
+            output_path=output_file,
+            input_file_type="openapi",
+            assert_func=assert_file_content,
+            expected_file="discriminator/enum_use_enum_values_sanitized.py",
+            extra_args=[
+                "--target-python-version",
+                "3.10",
+                "--output-model-type",
+                "pydantic_v2.BaseModel",
+                "--use-enum-values-in-discriminator",
+            ],
+        )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_discriminator_enum_duplicate(output_file: Path) -> None:
+    """Test OpenAPI generation with duplicate discriminator enum."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "discriminator_enum_duplicate.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_OPENAPI_PATH / "discriminator" / "enum_duplicate.py",
+        extra_args=["--target-python-version", "3.10", "--output-model-type", "pydantic_v2.BaseModel"],
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_discriminator_enum_single_value(output_file: Path) -> None:
+    """Single-value enum discriminator with allOf inheritance."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "discriminator_enum_single_value.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_OPENAPI_PATH / "discriminator" / "enum_single_value.py",
+        extra_args=["--target-python-version", "3.10", "--output-model-type", "pydantic_v2.BaseModel"],
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_discriminator_enum_single_value_use_enum(output_file: Path) -> None:
+    """Single-value enum with allOf + --use-enum-values-in-discriminator."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "discriminator_enum_single_value.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_OPENAPI_PATH / "discriminator" / "enum_single_value_use_enum.py",
+        extra_args=[
+            "--target-python-version",
+            "3.10",
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--use-enum-values-in-discriminator",
+        ],
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_discriminator_enum_single_value_anyof(output_file: Path) -> None:
+    """Single-value enum discriminator with anyOf - uses enum value, not model name."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "discriminator_enum_single_value_anyof.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_OPENAPI_PATH / "discriminator" / "enum_single_value_anyof.py",
+        extra_args=["--target-python-version", "3.10", "--output-model-type", "pydantic_v2.BaseModel"],
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_discriminator_enum_single_value_anyof_use_enum(output_file: Path) -> None:
+    """Single-value enum with anyOf + --use-enum-values-in-discriminator."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "discriminator_enum_single_value_anyof.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_OPENAPI_PATH / "discriminator" / "enum_single_value_anyof_use_enum.py",
+        extra_args=[
+            "--target-python-version",
+            "3.10",
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--use-enum-values-in-discriminator",
+        ],
+    )
+
+
+def test_main_openapi_discriminator_with_properties(output_file: Path) -> None:
+    """Test OpenAPI generation with discriminator properties."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "discriminator_with_properties.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_OPENAPI_PATH / "discriminator" / "with_properties.py",
+        extra_args=["--output-model-type", "pydantic_v2.BaseModel"],
+    )
+
+
+def test_main_openapi_discriminator_allof(output_file: Path) -> None:
+    """Test OpenAPI generation with allOf discriminator polymorphism."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "discriminator_allof.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_OPENAPI_PATH / "discriminator" / "allof.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--snake-case-field",
+            "--use-annotated",
+            "--use-union-operator",
+            "--collapse-root-models",
+        ],
+    )
+
+
+def test_main_openapi_discriminator_allof_no_subtypes(output_file: Path) -> None:
+    """Test OpenAPI generation with discriminator but no allOf subtypes.
+
+    This tests the edge case where a schema has a discriminator but nothing
+    inherits from it using allOf.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "discriminator_allof_no_subtypes.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_OPENAPI_PATH / "discriminator" / "allof_no_subtypes.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+        ],
+    )
+
+
+def test_main_openapi_allof_with_oneof_ref(output_file: Path) -> None:
+    """Test OpenAPI generation with allOf referencing a oneOf schema.
+
+    This tests the case where allOf combines a $ref to a schema with oneOf/discriminator
+    and additional properties. Regression test for issue #1763.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "allof_with_oneof_ref.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_OPENAPI_PATH / "allof_with_oneof_ref.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+        ],
+    )
+
+
+def test_main_openapi_allof_with_anyof_ref(output_file: Path) -> None:
+    """Test OpenAPI generation with allOf referencing an anyOf schema.
+
+    This tests the case where allOf combines a $ref to a schema with anyOf
+    and additional properties.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "allof_with_anyof_ref.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_OPENAPI_PATH / "allof_with_anyof_ref.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+        ],
+    )
+
+
+def test_main_pydantic_basemodel(output_file: Path) -> None:
+    """Test OpenAPI generation with Pydantic BaseModel output."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="general.py",
+        extra_args=["--output-model-type", "pydantic.BaseModel"],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--base-class"],
+    input_schema="openapi/api.yaml",
+    cli_args=["--base-class", "custom_module.Base"],
+    golden_output="openapi/base_class.py",
+)
+def test_main_base_class(output_file: Path) -> None:
+    """Specify a custom base class for generated models.
+
+    The `--base-class` flag sets a custom base class (here
+    `custom_module.Base`) that all generated models inherit from instead of
+    the default.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="base_class.py",
+        extra_args=["--base-class", "custom_module.Base"],
+        copy_files=[(DATA_PATH / "pyproject.toml", output_file.parent / "pyproject.toml")],
+    )
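+
+
+# Illustrative effect (a sketch): every generated model inherits the custom
+# base instead of pydantic.BaseModel:
+#     from custom_module import Base
+#
+#     class Pet(Base): ...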
+
+
+def test_target_python_version(output_file: Path) -> None:
+    """Test OpenAPI generation with target Python version."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=["--target-python-version", f"3.{MIN_VERSION}"],
+    )
+
+
+@BLACK_PY313_SKIP
+def test_target_python_version_313_has_future_annotations(output_file: Path) -> None:
+    """Test that Python 3.13 target includes future annotations import."""
+    with freeze_time(TIMESTAMP):
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "api.yaml",
+            output_path=output_file,
+            input_file_type=None,
+            assert_func=assert_file_content,
+            extra_args=["--target-python-version", "3.13"],
+        )
+
+
+@BLACK_PY314_SKIP
+def test_target_python_version_314_no_future_annotations(output_file: Path) -> None:
+    """Test that Python 3.14 target omits future annotations import (PEP 649)."""
+    with freeze_time(TIMESTAMP):
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "api.yaml",
+            output_path=output_file,
+            input_file_type=None,
+            assert_func=assert_file_content,
+            extra_args=["--target-python-version", "3.14"],
+        )
+
+
+@pytest.mark.benchmark
+def test_main_modular(output_dir: Path) -> None:
+    """Test main function on modular file."""
+    with freeze_time(TIMESTAMP):
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "modular.yaml",
+            output_path=output_dir,
+            expected_directory=EXPECTED_OPENAPI_PATH / "modular",
+        )
+
+
+def test_main_modular_reuse_model(output_dir: Path) -> None:
+    """Test main function on modular file."""
+    with freeze_time(TIMESTAMP):
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "modular.yaml",
+            output_path=output_dir,
+            expected_directory=EXPECTED_OPENAPI_PATH / "modular_reuse_model",
+            extra_args=["--reuse-model"],
+        )
+
+
+def test_main_modular_no_file(tmp_path: Path) -> None:
+    """Test main function on modular file with no output name."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "modular.yaml",
+        output_path=tmp_path / "output.py",
+        input_file_type=None,
+        expected_exit=Exit.ERROR,
+    )
+
+
+def test_main_modular_filename(output_file: Path) -> None:
+    """Test main function on modular file with filename."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "modular.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        expected_exit=Exit.ERROR,
+    )
+
+
+def test_main_openapi_no_file(
+    capsys: pytest.CaptureFixture[str], tmp_path: Path, monkeypatch: pytest.MonkeyPatch
+) -> None:
+    """Test main function on non-modular file with no output name."""
+    monkeypatch.chdir(tmp_path)
+
+    with freeze_time(TIMESTAMP):
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "api.yaml",
+            output_path=None,
+            expected_stdout_path=EXPECTED_OPENAPI_PATH / "no_file.py",
+            capsys=capsys,
+            expected_stderr=inferred_message.format("openapi") + "\n",
+        )
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "extra_template_data_config.py",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "extra_template_data_config_pydantic_v2.py",
+        ),
+    ],
+)
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+@pytest.mark.cli_doc(
+    options=["--extra-template-data"],
+    input_schema="openapi/api.yaml",
+    cli_args=["--extra-template-data", "openapi/extra_data.json"],
+    model_outputs={
+        "pydantic_v1": "openapi/extra_template_data_config.py",
+        "pydantic_v2": "openapi/extra_template_data_config_pydantic_v2.py",
+    },
+)
+def test_main_openapi_extra_template_data_config(
+    capsys: pytest.CaptureFixture,
+    output_model: str,
+    expected_output: str,
+    tmp_path: Path,
+    monkeypatch: pytest.MonkeyPatch,
+) -> None:
+    """Pass custom template variables from JSON file for code generation.
+
+    The `--extra-template-data` flag allows you to provide additional variables
+    (from a JSON file) that can be used in custom templates to configure generated
+    model settings like Config classes, enabling customization beyond standard options.
+    """
+    monkeypatch.chdir(tmp_path)
+    with freeze_time(TIMESTAMP):
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "api.yaml",
+            output_path=None,
+            expected_stdout_path=EXPECTED_OPENAPI_PATH / expected_output,
+            capsys=capsys,
+            input_file_type=None,
+            extra_args=[
+                "--extra-template-data",
+                str(OPEN_API_DATA_PATH / "extra_data.json"),
+                "--output-model-type",
+                output_model,
+            ],
+            expected_stderr=inferred_message.format("openapi") + "\n",
+        )
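+
+
+# Illustrative shape of an --extra-template-data file (an assumption for
+# illustration; extra_data.json is the fixture actually used): top-level keys
+# are model names, values are variables exposed to the templates, e.g.
+#     {"Pet": {"config": {"allow_mutation": false}}}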
+
+
+def test_main_custom_template_dir_old_style(
+    capsys: pytest.CaptureFixture, tmp_path: Path, monkeypatch: pytest.MonkeyPatch
+) -> None:
+    """Test main function with custom template directory."""
+    monkeypatch.chdir(tmp_path)
+    with freeze_time(TIMESTAMP):
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "api.yaml",
+            output_path=None,
+            expected_stdout_path=EXPECTED_OPENAPI_PATH / "custom_template_dir.py",
+            capsys=capsys,
+            input_file_type=None,
+            extra_args=[
+                "--custom-template-dir",
+                str(DATA_PATH / "templates_old_style"),
+                "--extra-template-data",
+                str(OPEN_API_DATA_PATH / "extra_data.json"),
+            ],
+            expected_stderr=inferred_message.format("openapi") + "\n",
+        )
+
+
+@pytest.mark.cli_doc(
+    options=["--custom-template-dir"],
+    input_schema="openapi/api.yaml",
+    cli_args=["--custom-template-dir", "templates", "--extra-template-data", "openapi/extra_data.json"],
+    golden_output="openapi/custom_template_dir.py",
+)
+def test_main_openapi_custom_template_dir(
+    capsys: pytest.CaptureFixture, tmp_path: Path, monkeypatch: pytest.MonkeyPatch
+) -> None:
+    """Use custom Jinja2 templates for model generation.
+
+    The `--custom-template-dir` option allows you to specify a directory containing custom Jinja2 templates
+    to override the default templates used for generating data models. This enables full customization of
+    the generated code structure and formatting. Use with `--extra-template-data` to pass additional data
+    to the templates.
+    """
+    monkeypatch.chdir(tmp_path)
+    with freeze_time(TIMESTAMP):
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "api.yaml",
+            output_path=None,
+            expected_stdout_path=EXPECTED_OPENAPI_PATH / "custom_template_dir.py",
+            capsys=capsys,
+            input_file_type=None,
+            extra_args=[
+                "--custom-template-dir",
+                str(DATA_PATH / "templates"),
+                "--extra-template-data",
+                str(OPEN_API_DATA_PATH / "extra_data.json"),
+            ],
+            expected_stderr=inferred_message.format("openapi") + "\n",
+        )
+
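+# Illustrative layout (assumed file names): files in the directory passed to
+# --custom-template-dir shadow the built-in Jinja2 templates with the same
+# relative path, e.g.
+#
+#     templates/
+#         pydantic/BaseModel.jinja2    # would override Pydantic v1 model rendering
+#
+# and values from --extra-template-data are exposed to these templates.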
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] >= "24",
+    reason="Installed black doesn't support the old style",
+)
+def test_pyproject(tmp_path: Path) -> None:
+    """Test code generation using pyproject.toml configuration."""
+    if platform.system() == "Windows":
+
+        def get_path(path: Path) -> str:
+            return str(path).replace("\\", "\\\\")
+
+    else:
+
+        def get_path(path: Path) -> str:
+            return str(path)
+
+    output_file: Path = tmp_path / "output.py"
+    pyproject_toml_path = DATA_PATH / "project" / "pyproject.toml"
+    pyproject_toml = (
+        pyproject_toml_path.read_text()
+        .replace("INPUT_PATH", get_path(OPEN_API_DATA_PATH / "api.yaml"))
+        .replace("OUTPUT_PATH", get_path(output_file))
+        .replace("ALIASES_PATH", get_path(OPEN_API_DATA_PATH / "empty_aliases.json"))
+        .replace(
+            "EXTRA_TEMPLATE_DATA_PATH",
+            get_path(OPEN_API_DATA_PATH / "empty_data.json"),
+        )
+        .replace("CUSTOM_TEMPLATE_DIR_PATH", get_path(tmp_path))
+    )
+    (tmp_path / "pyproject.toml").write_text(pyproject_toml)
+
+    with chdir(tmp_path):
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "api.yaml",
+            output_path=output_file,
+            input_file_type=None,
+            assert_func=assert_file_content,
+        )
+
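+# Minimal sketch of the pyproject.toml consumed above, with the placeholder
+# tokens the test substitutes left in place (key names assumed to mirror the
+# CLI flags):
+#
+#     [tool.datamodel-codegen]
+#     input = "INPUT_PATH"
+#     output = "OUTPUT_PATH"
+#     aliases = "ALIASES_PATH"
+#     extra-template-data = "EXTRA_TEMPLATE_DATA_PATH"
+#     custom-template-dir = "CUSTOM_TEMPLATE_DIR_PATH"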
+
+def test_pyproject_not_found(tmp_path: Path) -> None:
+    """Test code generation when pyproject.toml is not found."""
+    output_file: Path = tmp_path / "output.py"
+    with chdir(tmp_path):
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "api.yaml",
+            output_path=output_file,
+            input_file_type=None,
+            assert_func=assert_file_content,
+        )
+
+
+def test_stdin(monkeypatch: pytest.MonkeyPatch, output_file: Path) -> None:
+    """Test OpenAPI code generation from stdin input."""
+    run_main_and_assert(
+        stdin_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        monkeypatch=monkeypatch,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="general.py",
+        transform=lambda s: s.replace("#   filename:  <stdin>", "#   filename:  api.yaml"),
+    )
+
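+# Equivalent shell usage (sketch): when --input is omitted the CLI reads the
+# schema from stdin, e.g. `datamodel-codegen --output model.py < api.yaml`, and
+# the header records `filename: <stdin>`, which the transform above normalizes.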
+
+@pytest.mark.cli_doc(
+    options=["--validation"],
+    input_schema="openapi/api.yaml",
+    cli_args=["--validation"],
+    golden_output="openapi/general.py",
+)
+def test_validation(mocker: MockerFixture, output_file: Path) -> None:
+    """Enable validation constraints (deprecated, use --field-constraints).
+
+    The `--validation` flag configures the code generation behavior.
+    """
+    mock_prance = mocker.patch("prance.BaseParser")
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="general.py",
+        extra_args=["--validation"],
+    )
+    mock_prance.assert_called_once()
+
+
+def test_validation_failed(mocker: MockerFixture, output_file: Path) -> None:
+    """Test OpenAPI code generation with validation failure."""
+    mock_prance = mocker.patch("prance.BaseParser", side_effect=Exception("error"))
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "invalid.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        expected_exit=Exit.ERROR,
+        extra_args=["--validation"],
+    )
+    mock_prance.assert_called_once()
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output", "args"),
+    [
+        ("pydantic.BaseModel", "with_field_constraints.py", []),
+        (
+            "pydantic.BaseModel",
+            "with_field_constraints_use_unique_items_as_set.py",
+            ["--use-unique-items-as-set"],
+        ),
+        ("pydantic_v2.BaseModel", "with_field_constraints_pydantic_v2.py", []),
+        (
+            "pydantic_v2.BaseModel",
+            "with_field_constraints_pydantic_v2_use_generic_container_types.py",
+            ["--use-generic-container-types"],
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "with_field_constraints_pydantic_v2_use_generic_container_types_set.py",
+            ["--use-generic-container-types", "--use-unique-items-as-set"],
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "with_field_constraints_pydantic_v2_use_standard_collections.py",
+            [
+                "--use-standard-collections",
+            ],
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "with_field_constraints_pydantic_v2_use_standard_collections_set.py",
+            ["--use-standard-collections", "--use-unique-items-as-set"],
+        ),
+    ],
+)
+@pytest.mark.cli_doc(
+    options=["--use-unique-items-as-set"],
+    input_schema="openapi/api_constrained.yaml",
+    cli_args=["--use-unique-items-as-set", "--field-constraints"],
+    golden_output="openapi/with_field_constraints_use_unique_items_as_set.py",
+)
+def test_main_with_field_constraints(
+    output_model: str, expected_output: str, args: list[str], output_file: Path
+) -> None:
+    """Generate set types for arrays with uniqueItems constraint.
+
+    The `--use-unique-items-as-set` flag generates Python set types instead of
+    list types for JSON Schema arrays that have the uniqueItems constraint set
+    to true, enforcing uniqueness at the type level.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api_constrained.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file=expected_output,
+        extra_args=["--field-constraints", "--output-model-type", output_model, *args],
+    )
+
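+# Sketch of the --use-unique-items-as-set effect on a hypothetical field: an
+# array with `uniqueItems: true` is typed as
+#     tags: Set[str]
+# rather than
+#     tags: List[str]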
+
+@pytest.mark.cli_doc(
+    options=["--field-constraints"],
+    input_schema="openapi/api_constrained.yaml",
+    cli_args=["--field-constraints"],
+    model_outputs={
+        "pydantic_v1": "main/openapi/with_field_constraints.py",
+        "pydantic_v2": "main/openapi/with_field_constraints_pydantic_v2.py",
+    },
+    primary=True,
+)
+def test_main_field_constraints_model_outputs(output_file: Path) -> None:
+    """Generate Field() with validation constraints from schema.
+
+    The `--field-constraints` flag generates Pydantic Field() definitions with
+    validation constraints (min/max length, pattern, etc.) from the schema.
+    Output differs between Pydantic v1 and v2 due to API changes.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api_constrained.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="with_field_constraints.py",
+        extra_args=["--field-constraints"],
+    )
+
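+# Sketch of the --field-constraints contrast (hypothetical field): constraints
+# are emitted as Field() arguments,
+#     name: str = Field(..., min_length=1, max_length=10)
+# instead of the default constrained-type form such as
+#     name: constr(min_length=1, max_length=10)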
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "without_field_constraints.py",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "without_field_constraints_pydantic_v2.py",
+        ),
+    ],
+)
+def test_main_without_field_constraints(output_model: str, expected_output: str, output_file: Path) -> None:
+    """Test OpenAPI generation without field constraints."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api_constrained.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file=expected_output,
+        extra_args=["--output-model-type", output_model],
+    )
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "with_aliases.py",
+        ),
+        (
+            "msgspec.Struct",
+            "with_aliases_msgspec.py",
+        ),
+    ],
+)
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+@pytest.mark.cli_doc(
+    options=["--aliases"],
+    input_schema="openapi/api.yaml",
+    cli_args=["--aliases", "openapi/aliases.json", "--target-python", "3.9"],
+    model_outputs={
+        "pydantic_v1": "openapi/with_aliases.py",
+        "msgspec": "openapi/with_aliases_msgspec.py",
+    },
+    primary=True,
+)
+def test_main_with_aliases(output_model: str, expected_output: str, output_file: Path) -> None:
+    """Apply custom field and class name aliases from JSON file.
+
+    The `--aliases` option allows renaming fields and classes via a JSON mapping file,
+    providing fine-grained control over generated names independent of schema definitions.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file=expected_output,
+        extra_args=[
+            "--aliases",
+            str(OPEN_API_DATA_PATH / "aliases.json"),
+            "--target-python",
+            "3.9",
+            "--output-model-type",
+            output_model,
+        ],
+    )
+
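+# Illustrative aliases file (hypothetical entries, not the test's aliases.json):
+# a flat JSON object mapping schema names to the Python names to emit, e.g.
+#
+#     {"id": "identifier", "Pet": "Animal"}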
+
+def test_main_with_bad_aliases(output_file: Path) -> None:
+    """Test OpenAPI generation with invalid aliases file."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        expected_exit=Exit.ERROR,
+        extra_args=["--aliases", str(OPEN_API_DATA_PATH / "not.json")],
+    )
+
+
+def test_main_with_more_bad_aliases(output_file: Path) -> None:
+    """Test OpenAPI generation with malformed aliases file."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        expected_exit=Exit.ERROR,
+        extra_args=["--aliases", str(OPEN_API_DATA_PATH / "list.json")],
+    )
+
+
+def test_main_with_bad_extra_data(output_file: Path) -> None:
+    """Test OpenAPI generation with invalid extra template data file."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        expected_exit=Exit.ERROR,
+        extra_args=["--extra-template-data", str(OPEN_API_DATA_PATH / "not.json")],
+    )
+
+
+@pytest.mark.benchmark
+def test_main_with_snake_case_field(output_file: Path) -> None:
+    """Test OpenAPI generation with snake case field naming."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=["--snake-case-field"],
+    )
+
+
+@pytest.mark.benchmark
+@pytest.mark.cli_doc(
+    options=["--strip-default-none"],
+    input_schema="openapi/api.yaml",
+    cli_args=["--strip-default-none"],
+    golden_output="openapi/with_strip_default_none.py",
+)
+def test_main_with_strip_default_none(output_file: Path) -> None:
+    """Remove fields with None as default value from generated models.
+
+    The `--strip-default-none` option removes fields that have None as their default value from the
+    generated models. This results in cleaner model definitions by excluding optional fields that
+    default to None.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=["--strip-default-none"],
+    )
+
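+# Sketch (hypothetical field): with --strip-default-none an optional field is
+# emitted as
+#     name: Optional[str]
+# instead of
+#     name: Optional[str] = None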
+
+def test_disable_timestamp(output_file: Path) -> None:
+    """Test OpenAPI generation with timestamp disabled."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=["--disable-timestamp"],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--enable-version-header"],
+    input_schema="openapi/api.yaml",
+    cli_args=["--enable-version-header"],
+    golden_output="openapi/enable_version_header.py",
+)
+def test_enable_version_header(output_file: Path) -> None:
+    """Include tool version information in file header.
+
+    The `--enable-version-header` flag adds a `version:` line with the
+    datamodel-code-generator version to the generated file header.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="enable_version_header.py",
+        extra_args=["--enable-version-header"],
+        transform=lambda s: s.replace(f"#   version:   {get_version()}", "#   version:   0.0.0"),
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--enable-command-header"],
+    input_schema="openapi/api.yaml",
+    cli_args=["--enable-command-header"],
+    golden_output="openapi/enable_command_header.py",
+)
+def test_enable_command_header(output_file: Path) -> None:
+    """Include command-line options in file header for reproducibility.
+
+    The `--enable-command-header` flag adds the full command-line used to generate
+    the file to the header, making it easy to reproduce the generation.
+    """
+
+    def normalize_command(s: str) -> str:
+        # Replace the actual command line with a placeholder for consistent testing
+        return re.sub(r"#   command:   datamodel-codegen .*", "#   command:   datamodel-codegen [COMMAND]", s)
+
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="enable_command_header.py",
+        extra_args=["--enable-command-header"],
+        transform=normalize_command,
+    )
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "allow_population_by_field_name.py",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "allow_population_by_field_name_pydantic_v2.py",
+        ),
+    ],
+)
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+@pytest.mark.cli_doc(
+    options=["--allow-population-by-field-name"],
+    input_schema="openapi/api.yaml",
+    cli_args=["--allow-population-by-field-name"],
+    model_outputs={
+        "pydantic_v1": "openapi/allow_population_by_field_name.py",
+        "pydantic_v2": "openapi/allow_population_by_field_name_pydantic_v2.py",
+    },
+)
+def test_allow_population_by_field_name(output_model: str, expected_output: str, output_file: Path) -> None:
+    """Allow Pydantic model population by field name (not just alias).
+
+    The `--allow-population-by-field-name` flag sets the corresponding Pydantic
+    config option so aliased fields can also be populated by their Python names.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file=expected_output,
+        extra_args=["--allow-population-by-field-name", "--output-model-type", output_model],
+    )
+
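+# Sketch of the effect: generated Pydantic v1 models gain
+#     class Config:
+#         allow_population_by_field_name = True
+# while pydantic_v2.BaseModel output uses the populate_by_name equivalent in
+# model_config.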
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "allow_extra_fields.py",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "allow_extra_fields_pydantic_v2.py",
+        ),
+    ],
+)
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+@pytest.mark.cli_doc(
+    options=["--allow-extra-fields"],
+    input_schema="openapi/api.yaml",
+    cli_args=["--allow-extra-fields"],
+    model_outputs={
+        "pydantic_v1": "openapi/allow_extra_fields.py",
+        "pydantic_v2": "openapi/allow_extra_fields_pydantic_v2.py",
+    },
+)
+def test_allow_extra_fields(output_model: str, expected_output: str, output_file: Path) -> None:
+    """Allow extra fields in generated Pydantic models (extra='allow').
+
+    The `--allow-extra-fields` flag configures generated models to accept and
+    retain fields that are not declared in the schema.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file=expected_output,
+        extra_args=["--allow-extra-fields", "--output-model-type", output_model],
+    )
+
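+# Sketch of the effect: generated Pydantic v1 models gain
+#     class Config:
+#         extra = Extra.allow
+# while pydantic_v2.BaseModel output uses model_config with extra="allow".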
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "enable_faux_immutability.py",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "enable_faux_immutability_pydantic_v2.py",
+        ),
+    ],
+)
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+@pytest.mark.cli_doc(
+    options=["--enable-faux-immutability"],
+    input_schema="openapi/api.yaml",
+    cli_args=["--enable-faux-immutability"],
+    model_outputs={
+        "pydantic_v1": "openapi/enable_faux_immutability.py",
+        "pydantic_v2": "openapi/enable_faux_immutability_pydantic_v2.py",
+    },
+)
+def test_enable_faux_immutability(output_model: str, expected_output: str, output_file: Path) -> None:
+    """Enable faux immutability in Pydantic v1 models (allow_mutation=False).
+
+    The `--enable-faux-immutability` flag configures generated models so that
+    assigning to an attribute after instantiation raises an error.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file=expected_output,
+        extra_args=["--enable-faux-immutability", "--output-model-type", output_model],
+    )
+
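+# Sketch of the effect: generated Pydantic v1 models gain
+#     class Config:
+#         allow_mutation = False
+# so `model.field = value` raises after construction; v2 output is expected to
+# use the frozen model_config equivalent.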
+
+@pytest.mark.benchmark
+def test_use_default(output_file: Path) -> None:
+    """Test OpenAPI generation with use default option."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=["--use-default"],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--force-optional"],
+    input_schema="openapi/api.yaml",
+    cli_args=["--force-optional"],
+    golden_output="openapi/force_optional.py",
+)
+@pytest.mark.benchmark
+def test_force_optional(output_file: Path) -> None:
+    """Force all fields to be Optional regardless of required status.
+
+    The `--force-optional` flag makes every generated field optional with a
+    default of None, even when the schema marks it as required.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=["--force-optional"],
+    )
+
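+# Sketch (hypothetical field): with --force-optional a schema-required field
+# such as `name` is still emitted as
+#     name: Optional[str] = None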
+
+def test_main_with_exclusive(output_file: Path) -> None:
+    """Test OpenAPI generation with exclusive keywords."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "exclusive.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+    )
+
+
+def test_main_subclass_enum(output_file: Path) -> None:
+    """Test OpenAPI generation with subclass enum."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "subclass_enum.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "22",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_specialized_enum(output_file: Path) -> None:
+    """Test OpenAPI generation with specialized enum."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "subclass_enum.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="enum_specialized.py",
+        extra_args=["--target-python-version", "3.11"],
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "22",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_specialized_enums_disabled(output_file: Path) -> None:
+    """Test OpenAPI generation with specialized enums disabled."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "subclass_enum.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="subclass_enum.py",
+        extra_args=["--target-python-version", "3.11", "--no-use-specialized-enum"],
+    )
+
+
+def test_main_use_standard_collections(output_dir: Path) -> None:
+    """Test OpenAPI generation with standard collections."""
+    with freeze_time(TIMESTAMP):
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "modular.yaml",
+            output_path=output_dir,
+            expected_directory=EXPECTED_OPENAPI_PATH / "use_standard_collections",
+            extra_args=["--use-standard-collections"],
+        )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] >= "24",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_use_generic_container_types(output_dir: Path) -> None:
+    """Test OpenAPI generation with generic container types."""
+    with freeze_time(TIMESTAMP):
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "modular.yaml",
+            output_path=output_dir,
+            expected_directory=EXPECTED_OPENAPI_PATH / "use_generic_container_types",
+            extra_args=["--use-generic-container-types"],
+        )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] >= "24",
+    reason="Installed black doesn't support the old style",
+)
+@pytest.mark.benchmark
+def test_main_use_generic_container_types_standard_collections(
+    output_dir: Path,
+) -> None:
+    """Test OpenAPI generation with generic container types and standard collections."""
+    with freeze_time(TIMESTAMP):
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "modular.yaml",
+            output_path=output_dir,
+            expected_directory=EXPECTED_OPENAPI_PATH / "use_generic_container_types_standard_collections",
+            extra_args=["--use-generic-container-types", "--use-standard-collections"],
+        )
+
+
+def test_main_original_field_name_delimiter_without_snake_case_field(
+    capsys: pytest.CaptureFixture, output_file: Path
+) -> None:
+    """Test OpenAPI generation with original field name delimiter error."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "modular.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        expected_exit=Exit.ERROR,
+        extra_args=["--original-field-name-delimiter", "-"],
+        capsys=capsys,
+        expected_stderr_contains="`--original-field-name-delimiter` can not be used without `--snake-case-field`.",
+    )
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output", "date_type"),
+    [
+        ("pydantic.BaseModel", "datetime.py", "AwareDatetime"),
+        ("pydantic_v2.BaseModel", "datetime_pydantic_v2.py", "AwareDatetime"),
+        ("pydantic_v2.BaseModel", "datetime_pydantic_v2_datetime.py", "datetime"),
+        ("dataclasses.dataclass", "datetime_dataclass.py", "datetime"),
+        ("msgspec.Struct", "datetime_msgspec.py", "datetime"),
+    ],
+)
+@pytest.mark.cli_doc(
+    options=["--output-datetime-class"],
+    input_schema="openapi/datetime.yaml",
+    cli_args=["--output-datetime-class", "AwareDatetime"],
+    golden_output="openapi/datetime_pydantic_v2.py",
+)
+def test_main_openapi_aware_datetime(
+    output_model: str, expected_output: str, date_type: str, output_file: Path
+) -> None:
+    """Specify datetime class type for date-time schema fields.
+
+    The `--output-datetime-class` flag controls which datetime type to use for fields
+    with date-time format. Options include 'AwareDatetime' for timezone-aware datetimes
+    or 'datetime' for standard Python datetime objects.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "datetime.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file=expected_output,
+        extra_args=["--output-datetime-class", date_type, "--output-model-type", output_model],
+    )
+
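+# Sketch (hypothetical field): with --output-datetime-class AwareDatetime a
+# date-time field becomes
+#     created_at: AwareDatetime    # pydantic's timezone-aware type (v2)
+# while the plain `datetime` choice keeps the stdlib type:
+#     created_at: datetime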
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "datetime.py",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "datetime_pydantic_v2.py",
+        ),
+    ],
+)
+def test_main_openapi_datetime(output_model: str, expected_output: str, output_file: Path) -> None:
+    """Test OpenAPI generation with datetime types."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "datetime.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file=expected_output,
+        extra_args=["--output-model-type", output_model],
+    )
+
+
+def test_main_models_not_found(capsys: pytest.CaptureFixture, output_file: Path) -> None:
+    """Test OpenAPI generation with models not found error."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "no_components.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        expected_exit=Exit.ERROR,
+        capsys=capsys,
+        expected_stderr_contains="Models not found in the input data",
+    )
+
+
+@pytest.mark.skipif(
+    version.parse(pydantic.VERSION) < version.parse("1.9.0"),
+    reason="Require Pydantic version 1.9.0 or later ",
+)
+def test_main_openapi_enum_models_as_literal_one(min_version: str, output_file: Path) -> None:
+    """Test OpenAPI generation with one enum model as literal."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "enum_models.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="enum_models/one.py",
+        extra_args=["--enum-field-as-literal", "one", "--target-python-version", min_version],
+    )
+
+
+@pytest.mark.skipif(
+    version.parse(pydantic.VERSION) < version.parse("1.9.0"),
+    reason="Require Pydantic version 1.9.0 or later ",
+)
+@pytest.mark.cli_doc(
+    options=["--use-one-literal-as-default"],
+    input_schema="openapi/enum_models.yaml",
+    cli_args=["--use-one-literal-as-default", "--enum-field-as-literal", "one"],
+    golden_output="openapi/enum_models/one_literal_as_default.py",
+)
+def test_main_openapi_use_one_literal_as_default(min_version: str, output_file: Path) -> None:
+    """Use single literal value as default when enum has only one option.
+
+    When a field's enum has exactly one member and is rendered as a Literal, the
+    `--use-one-literal-as-default` flag assigns that single value as the default.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "enum_models.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_OPENAPI_PATH / "enum_models" / "one_literal_as_default.py",
+        extra_args=[
+            "--enum-field-as-literal",
+            "one",
+            "--target-python-version",
+            min_version,
+            "--use-one-literal-as-default",
+        ],
+    )
+
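+# Sketch (hypothetical field): an enum with the single member "dog", rendered
+# as a Literal, gains that member as its default:
+#     kind: Literal['dog'] = 'dog'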
+
+@pytest.mark.skipif(
+    version.parse(pydantic.VERSION) < version.parse("1.9.0"),
+    reason="Require Pydantic version 1.9.0 or later ",
+)
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] >= "24",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_enum_models_as_literal_all(min_version: str, output_file: Path) -> None:
+    """Test OpenAPI generation with all enum models as literal."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "enum_models.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="enum_models/all.py",
+        extra_args=["--enum-field-as-literal", "all", "--target-python-version", min_version],
+    )
+
+
+@pytest.mark.skipif(
+    version.parse(pydantic.VERSION) < version.parse("1.9.0"),
+    reason="Require Pydantic version 1.9.0 or later ",
+)
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] >= "24",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_enum_models_as_literal(output_file: Path) -> None:
+    """Test OpenAPI generation with enum models as literal."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "enum_models.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_OPENAPI_PATH / "enum_models" / "as_literal.py",
+        extra_args=["--enum-field-as-literal", "all", "--target-python-version", f"3.{MIN_VERSION}"],
+    )
+
+
+@pytest.mark.benchmark
+def test_main_openapi_all_of_required(output_file: Path) -> None:
+    """Test OpenAPI generation with allOf required fields."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "allof_required.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="allof_required.py",
+    )
+
+
+@pytest.mark.benchmark
+def test_main_openapi_nullable(output_file: Path) -> None:
+    """Test OpenAPI generation with nullable types."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "nullable.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="nullable.py",
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--strict-nullable"],
+    input_schema="openapi/nullable.yaml",
+    cli_args=["--strict-nullable"],
+    golden_output="openapi/nullable_strict_nullable.py",
+)
+def test_main_openapi_nullable_strict_nullable(output_file: Path) -> None:
+    """Strictly handle nullable types in OpenAPI schemas.
+
+    The `--strict-nullable` flag ties Optional strictly to an explicit
+    `nullable: true` in the schema, instead of inferring nullability from
+    whether a field is required or has a default.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "nullable.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="nullable_strict_nullable.py",
+        extra_args=["--strict-nullable"],
+    )
+
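+# Sketch of the contrast (hypothetical fields): under --strict-nullable,
+#     name: Optional[str] = None    # schema says nullable: true
+#     kind: str = 'dog'             # optional with default, but not nullable
+# whereas the default behavior would also wrap `kind` in Optional.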
+
+def test_main_openapi_ref_nullable_strict_nullable(output_file: Path) -> None:
+    """Test that nullable attribute from $ref schema is propagated."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "ref_nullable.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="ref_nullable_strict_nullable.py",
+        extra_args=["--strict-nullable", "--use-union-operator"],
+    )
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "general.py",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "pydantic_v2.py",
+        ),
+        (
+            "msgspec.Struct",
+            "msgspec_pattern.py",
+        ),
+    ],
+)
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_pattern(output_model: str, expected_output: str, output_file: Path) -> None:
+    """Test OpenAPI generation with pattern validation."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "pattern.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file=f"pattern/{expected_output}",
+        extra_args=["--target-python", "3.9", "--output-model-type", output_model],
+        transform=lambda s: s.replace("pattern.yaml", "pattern.json"),
+    )
+
+
+@pytest.mark.parametrize(
+    ("expected_output", "args"),
+    [
+        ("pattern_with_lookaround_pydantic_v2.py", []),
+        (
+            "pattern_with_lookaround_pydantic_v2_field_constraints.py",
+            ["--field-constraints"],
+        ),
+    ],
+)
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] < "22",
+    reason="Installed black doesn't support Python version 3.10",
+)
+def test_main_openapi_pattern_with_lookaround_pydantic_v2(
+    expected_output: str, args: list[str], output_file: Path
+) -> None:
+    """Test OpenAPI generation with pattern lookaround for Pydantic v2."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "pattern_lookaround.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file=expected_output,
+        extra_args=["--target-python", "3.9", "--output-model-type", "pydantic_v2.BaseModel", *args],
+    )
+
+
+def test_main_generate_custom_class_name_generator_modular(
+    tmp_path: Path,
+) -> None:
+    """Test OpenAPI generation with custom class name generator in modular mode."""
+    output_path = tmp_path / "model"
+    main_modular_custom_class_name_dir = EXPECTED_OPENAPI_PATH / "modular_custom_class_name"
+
+    def custom_class_name_generator(name: str) -> str:
+        return f"Custom{name[0].upper() + name[1:]}"
+
+    with freeze_time(TIMESTAMP):
+        input_ = (OPEN_API_DATA_PATH / "modular.yaml").relative_to(Path.cwd())
+        assert not input_.is_absolute()
+        generate(
+            input_=input_,
+            input_file_type=InputFileType.OpenAPI,
+            output=output_path,
+            custom_class_name_generator=custom_class_name_generator,
+        )
+
+        assert_directory_content(output_path, main_modular_custom_class_name_dir)
+
+
+def test_main_http_openapi(mocker: MockerFixture, output_file: Path) -> None:
+    """Test OpenAPI code generation from HTTP URL."""
+
+    def get_mock_response(path: str) -> Mock:
+        mock = mocker.Mock()
+        mock.text = (OPEN_API_DATA_PATH / path).read_text()
+        return mock
+
+    httpx_get_mock = mocker.patch(
+        "httpx.get",
+        side_effect=[
+            get_mock_response("refs.yaml"),
+            get_mock_response("definitions.yaml"),
+        ],
+    )
+
+    run_main_url_and_assert(
+        url="https://example.com/refs.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="http_refs.py",
+    )
+    httpx_get_mock.assert_has_calls([
+        call(
+            "https://example.com/refs.yaml",
+            headers=None,
+            verify=True,
+            follow_redirects=True,
+            params=None,
+        ),
+        call(
+            "https://teamdigitale.github.io/openapi/0.0.6/definitions.yaml",
+            headers=None,
+            verify=True,
+            follow_redirects=True,
+            params=None,
+        ),
+    ])
+
+
+@pytest.mark.cli_doc(
+    options=["--disable-appending-item-suffix"],
+    input_schema="openapi/api_constrained.yaml",
+    cli_args=["--disable-appending-item-suffix", "--field-constraints"],
+    golden_output="openapi/disable_appending_item_suffix.py",
+)
+def test_main_disable_appending_item_suffix(output_file: Path) -> None:
+    """Disable appending 'Item' suffix to array item types.
+
+    The `--disable-appending-item-suffix` flag stops the generator from appending
+    an `Item` suffix to the models created for constrained array items.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api_constrained.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=["--field-constraints", "--disable-appending-item-suffix"],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--openapi-scopes"],
+    input_schema="openapi/body_and_parameters.yaml",
+    cli_args=["--openapi-scopes", "paths", "schemas"],
+    golden_output="openapi/body_and_parameters/general.py",
+)
+def test_main_openapi_body_and_parameters(output_file: Path) -> None:
+    """Specify OpenAPI scopes to generate (schemas, paths, parameters).
+
+    The `--openapi-scopes` flag selects which sections of the OpenAPI document
+    are turned into models; here both `schemas` and `paths` (request bodies,
+    parameters, and responses) are generated.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "body_and_parameters.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_OPENAPI_PATH / "body_and_parameters" / "general.py",
+        extra_args=["--openapi-scopes", "paths", "schemas"],
+    )
+
+
+def test_main_openapi_body_and_parameters_remote_ref(mocker: MockerFixture, output_file: Path) -> None:
+    """Test OpenAPI generation with body and parameters remote reference."""
+    input_path = OPEN_API_DATA_PATH / "body_and_parameters_remote_ref.yaml"
+    person_response = mocker.Mock()
+    person_response.text = input_path.read_text()
+    httpx_get_mock = mocker.patch("httpx.get", side_effect=[person_response])
+
+    run_main_and_assert(
+        input_path=input_path,
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_OPENAPI_PATH / "body_and_parameters" / "remote_ref.py",
+        extra_args=["--openapi-scopes", "paths", "schemas"],
+    )
+    httpx_get_mock.assert_has_calls([
+        call(
+            "https://schema.example",
+            headers=None,
+            verify=True,
+            follow_redirects=True,
+            params=None,
+        ),
+    ])
+
+
+def test_main_openapi_body_and_parameters_only_paths(output_file: Path) -> None:
+    """Test OpenAPI generation with only paths scope."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "body_and_parameters.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_OPENAPI_PATH / "body_and_parameters" / "only_paths.py",
+        extra_args=["--openapi-scopes", "paths"],
+    )
+
+
+def test_main_openapi_body_and_parameters_only_schemas(output_file: Path) -> None:
+    """Test OpenAPI generation with only schemas scope."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "body_and_parameters.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_OPENAPI_PATH / "body_and_parameters" / "only_schemas.py",
+        extra_args=["--openapi-scopes", "schemas"],
+    )
+
+
+def test_main_openapi_content_in_parameters(output_file: Path) -> None:
+    """Test OpenAPI generation with content in parameters."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "content_in_parameters.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="content_in_parameters.py",
+    )
+
+
+def test_main_openapi_oas_response_reference(output_file: Path) -> None:
+    """Test OpenAPI generation with OAS response reference."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "oas_response_reference.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="oas_response_reference.py",
+        extra_args=["--openapi-scopes", "paths", "schemas"],
+    )
+
+
+def test_main_openapi_json_pointer(output_file: Path) -> None:
+    """Test OpenAPI generation with JSON pointer references."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "json_pointer.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="json_pointer.py",
+    )
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        ("pydantic.BaseModel", "use_annotated_with_field_constraints.py"),
+        (
+            "pydantic_v2.BaseModel",
+            "use_annotated_with_field_constraints_pydantic_v2.py",
+        ),
+    ],
+)
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_use_annotated_with_field_constraints(
+    output_model: str, expected_output: str, min_version: str, output_file: Path
+) -> None:
+    """Test OpenAPI generation with Annotated and field constraints."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api_constrained.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file=expected_output,
+        extra_args=[
+            "--field-constraints",
+            "--use-annotated",
+            "--target-python-version",
+            min_version,
+            "--output-model-type",
+            output_model,
+        ],
+    )
+
+
+def test_main_nested_enum(output_file: Path) -> None:
+    """Test OpenAPI generation with nested enum."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "nested_enum.json",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+    )
+
+
+def test_openapi_special_yaml_keywords(mocker: MockerFixture, output_file: Path) -> None:
+    """Test OpenAPI generation with special YAML keywords."""
+    mock_prance = mocker.patch("prance.BaseParser")
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "special_yaml_keywords.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="special_yaml_keywords.py",
+        extra_args=["--validation"],
+    )
+    mock_prance.assert_called_once()
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] < "22",
+    reason="Installed black doesn't support Python version 3.10",
+)
+def test_main_openapi_nullable_use_union_operator(output_file: Path) -> None:
+    """Test OpenAPI generation with nullable using union operator."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "nullable.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="nullable_strict_nullable_use_union_operator.py",
+        extra_args=["--use-union-operator", "--strict-nullable"],
+    )
+
+
+def test_external_relative_ref(tmp_path: Path) -> None:
+    """Test OpenAPI generation with external relative references."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "external_relative_ref" / "model_b",
+        output_path=tmp_path,
+        expected_directory=EXPECTED_OPENAPI_PATH / "external_relative_ref",
+    )
+
+
+def test_paths_external_ref(output_file: Path) -> None:
+    """Test OpenAPI generation with external refs in paths without components/schemas."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "paths_external_ref" / "openapi.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="paths_external_ref.py",
+        extra_args=["--openapi-scopes", "paths"],
+    )
+
+
+def test_paths_ref_with_external_schema(output_file: Path) -> None:
+    """Test OpenAPI generation with $ref to external path file containing relative schema refs."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "paths_ref_with_external_schema" / "openapi.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="paths_ref_with_external_schema.py",
+        extra_args=["--openapi-scopes", "schemas", "paths"],
+    )
+
+
+@pytest.mark.benchmark
+def test_main_collapse_root_models(output_file: Path) -> None:
+    """Test OpenAPI generation with collapsed root models."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "not_real_string.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=["--collapse-root-models"],
+    )
+
+
+def test_main_collapse_root_models_field_constraints(output_file: Path) -> None:
+    """Test OpenAPI generation with collapsed root models and field constraints."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "not_real_string.json",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=["--collapse-root-models", "--field-constraints"],
+    )
+
+
+def test_main_collapse_root_models_with_references_to_flat_types(output_file: Path) -> None:
+    """Test OpenAPI generation with collapsed root models referencing flat types."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "flat_type.jsonschema",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=["--collapse-root-models"],
+    )
+
+
+def test_main_openapi_max_items_enum(output_file: Path) -> None:
+    """Test OpenAPI generation with max items enum."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "max_items_enum.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="max_items_enum.py",
+    )
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "const.py",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "const_pydantic_v2.py",
+        ),
+    ],
+)
+def test_main_openapi_const(output_model: str, expected_output: str, output_file: Path) -> None:
+    """Test OpenAPI generation with const values."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "const.json",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file=expected_output,
+        extra_args=["--output-model-type", output_model],
+    )
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "const_field.py",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "const_field_pydantic_v2.py",
+        ),
+        (
+            "msgspec.Struct",
+            "const_field_msgspec.py",
+        ),
+        (
+            "typing.TypedDict",
+            "const_field_typed_dict.py",
+        ),
+        (
+            "dataclasses.dataclass",
+            "const_field_dataclass.py",
+        ),
+    ],
+)
+@pytest.mark.cli_doc(
+    options=["--collapse-root-models"],
+    input_schema="openapi/const.yaml",
+    cli_args=["--collapse-root-models"],
+    model_outputs={
+        "pydantic_v1": "openapi/const_field.py",
+        "pydantic_v2": "openapi/const_field_pydantic_v2.py",
+        "msgspec": "openapi/const_field_msgspec.py",
+        "typeddict": "openapi/const_field_typed_dict.py",
+        "dataclass": "openapi/const_field_dataclass.py",
+    },
+    comparison_output="openapi/const_baseline.py",
+    primary=True,
+)
+def test_main_openapi_const_field(output_model: str, expected_output: str, output_file: Path) -> None:
+    """Inline root model definitions instead of creating separate wrapper classes.
+
+    The `--collapse-root-models` option generates simpler output by inlining root models
+    directly instead of creating separate wrapper types. This shows how different output
+    model types (Pydantic v1/v2, dataclass, TypedDict, msgspec) handle const fields.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "const.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file=expected_output,
+        extra_args=["--output-model-type", output_model, "--collapse-root-models"],
+    )
+
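+# Sketch of the collapse (hypothetical names): without the flag a const string
+# may be emitted as a separate root model,
+#     class Kind(BaseModel):
+#         __root__: Literal['dog']
+# and referenced by name; with --collapse-root-models the field is inlined as
+#     kind: Literal['dog']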
+
+def test_main_openapi_complex_reference(output_file: Path) -> None:
+    """Test OpenAPI generation with complex references."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "complex_reference.json",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="complex_reference.py",
+    )
+
+
+def test_main_openapi_reference_to_object_properties(output_file: Path) -> None:
+    """Test OpenAPI generation with reference to object properties."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "reference_to_object_properties.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="reference_to_object_properties.py",
+    )
+
+
+def test_main_openapi_reference_to_object_properties_collapse_root_models(output_file: Path) -> None:
+    """Test OpenAPI generation with reference to object properties and collapsed root models."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "reference_to_object_properties.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="reference_to_object_properties_collapse_root_models.py",
+        extra_args=["--collapse-root-models"],
+    )
+
+
+def test_main_openapi_override_required_all_of_field(output_file: Path) -> None:
+    """Test OpenAPI generation with override required allOf field."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "override_required_all_of.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="override_required_all_of.py",
+        extra_args=["--collapse-root-models"],
+    )
+
+
+def test_main_openapi_allof_with_required_inherited_fields(output_file: Path) -> None:
+    """Test OpenAPI generation with allOf where required includes inherited fields."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "allof_with_required_inherited_fields.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="allof_with_required_inherited_fields.py",
+    )
+
+
+def test_main_openapi_allof_with_required_inherited_fields_force_optional(output_file: Path) -> None:
+    """Test OpenAPI generation with allOf and --force-optional flag."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "allof_with_required_inherited_fields.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="allof_with_required_inherited_fields_force_optional.py",
+        extra_args=["--force-optional"],
+    )
+
+
+def test_main_openapi_allof_with_required_inherited_nested_object(output_file: Path) -> None:
+    """Test OpenAPI generation with allOf where required includes inherited nested object fields."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "allof_with_required_inherited_nested_object.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="allof_with_required_inherited_nested_object.py",
+    )
+
+
+def test_main_openapi_allof_with_required_inherited_complex_allof(output_file: Path) -> None:
+    """Test OpenAPI generation with allOf where required includes complex allOf fields."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "allof_with_required_inherited_complex_allof.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="allof_with_required_inherited_complex_allof.py",
+    )
+
+
+def test_main_openapi_allof_with_required_inherited_comprehensive(output_file: Path) -> None:
+    """Test OpenAPI generation with allOf covering all type inheritance scenarios."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "allof_with_required_inherited_comprehensive.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="allof_with_required_inherited_comprehensive.py",
+    )
+
+
+def test_main_openapi_allof_partial_override_inherited_types(output_file: Path) -> None:
+    """Test OpenAPI allOf partial overrides inherit parent field types."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "allof_partial_override_inherited_types.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="allof_partial_override_inherited_types.py",
+    )
+
+
+def test_main_openapi_allof_partial_override_array_items(output_file: Path) -> None:
+    """Test OpenAPI allOf partial overrides inherit parent array item types."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "allof_partial_override_array_items.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="allof_partial_override_array_items.py",
+    )
+
+
+def test_main_openapi_allof_partial_override_array_items_no_parent(output_file: Path) -> None:
+    """Test OpenAPI allOf with array field not present in parent schema."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "allof_partial_override_array_items_no_parent.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="allof_partial_override_array_items_no_parent.py",
+    )
+
+
+def test_main_openapi_allof_partial_override_non_array_field(output_file: Path) -> None:
+    """Test OpenAPI allOf partial override with non-array fields for coverage."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "allof_partial_override_non_array_field.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="allof_partial_override_non_array_field.py",
+    )
+
+
+def test_main_openapi_allof_partial_override_nested_array_items(output_file: Path) -> None:
+    """Test OpenAPI allOf partial override with nested arrays for coverage."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "allof_partial_override_nested_array_items.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="allof_partial_override_nested_array_items.py",
+    )
+
+
+def test_main_openapi_allof_partial_override_deeply_nested_array(output_file: Path) -> None:
+    """Test OpenAPI allOf partial override with 3-level nested arrays for while loop coverage."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "allof_partial_override_deeply_nested_array.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="allof_partial_override_deeply_nested_array.py",
+    )
+
+
+def test_main_openapi_allof_partial_override_simple_list_any(output_file: Path) -> None:
+    """Test OpenAPI allOf partial override with simple List[Any] - while loop NOT entered."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "allof_partial_override_simple_list_any.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="allof_partial_override_simple_list_any.py",
+    )
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        ("pydantic.BaseModel", "allof_partial_override_unique_items.py"),
+        ("pydantic_v2.BaseModel", "allof_partial_override_unique_items_pydantic_v2.py"),
+    ],
+)
+def test_main_openapi_allof_partial_override_unique_items(
+    output_model: str, expected_output: str, output_file: Path
+) -> None:
+    """Test OpenAPI allOf partial override inherits uniqueItems from parent."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "allof_partial_override_unique_items.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file=expected_output,
+        extra_args=["--use-unique-items-as-set", "--output-model-type", output_model],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--allof-merge-mode"],
+    input_schema="openapi/allof_materialize_defaults.yaml",
+    cli_args=["--allof-merge-mode", "all"],
+    golden_output="main/openapi/allof_materialize_defaults.py",
+)
+def test_main_openapi_allof_merge_mode_all(output_file: Path) -> None:
+    """Merge all properties from parent schemas in allOf.
+
+    The `--allof-merge-mode` flag controls how parent schema properties are merged
+    in allOf compositions. With `all` mode, constraints plus annotations (default,
+    examples) are merged from parent properties. This ensures child schemas inherit
+    all metadata from parents.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "allof_materialize_defaults.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="allof_materialize_defaults.py",
+        extra_args=["--allof-merge-mode", "all"],
+    )
+
+
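+# Hedged illustration of `--allof-merge-mode all` (a sketch, not the golden
+# file above; the schema and names are invented for illustration): if a parent
+# property declares a default and a maxLength, `all` mode materializes both
+# onto the child that inherits the property via allOf:
+def _sketch_allof_merge_mode_all() -> None:
+    from pydantic import BaseModel, Field
+
+    class Child(BaseModel):
+        # constraint (max_length) and annotation (default) merged from parent
+        name: str = Field("anonymous", max_length=50)
+
+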
+@pytest.mark.cli_doc(
+    options=["--allof-merge-mode"],
+    input_schema="openapi/allof_merge_mode_none.yaml",
+    cli_args=["--allof-merge-mode", "none"],
+    golden_output="main/openapi/allof_merge_mode_none.py",
+    comparison_output="main/openapi/allof_materialize_defaults.py",
+)
+def test_main_openapi_allof_merge_mode_none(output_file: Path) -> None:
+    """Disable property merging from parent schemas in allOf.
+
+    With `none` mode, no fields are merged from parent properties. This is useful
+    when you want child schemas to define all their own constraints without inheriting
+    from parents.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "allof_merge_mode_none.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="allof_merge_mode_none.py",
+        extra_args=["--allof-merge-mode", "none"],
+    )
+
+
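+# Hedged counterpart for `--allof-merge-mode none` (same invented schema as
+# the sketch above): a child that re-declares the property keeps only its own
+# declaration; the parent's default and maxLength are not copied over:
+def _sketch_allof_merge_mode_none() -> None:
+    from pydantic import BaseModel
+
+    class Child(BaseModel):
+        name: str  # no inherited default or max_length
+
+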
+def test_main_openapi_allof_property_bool_schema(output_file: Path) -> None:
+    """Test OpenAPI allOf with bool property schema (e.g., `allowed: true`)."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "allof_property_bool_schema.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="allof_property_bool_schema.py",
+    )
+
+
+def test_main_openapi_allof_parent_no_properties(output_file: Path) -> None:
+    """Test OpenAPI allOf with parent schema having no properties."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "allof_parent_no_properties.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="allof_parent_no_properties.py",
+    )
+
+
+def test_main_openapi_allof_parent_bool_property(output_file: Path) -> None:
+    """Test OpenAPI allOf with parent having bool property schema (true/false)."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "allof_parent_bool_property.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="allof_parent_bool_property.py",
+    )
+
+
+def test_main_openapi_allof_multiple_parents_same_property(output_file: Path) -> None:
+    """Test OpenAPI allOf with multiple parents having the same property."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "allof_multiple_parents_same_property.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="allof_multiple_parents_same_property.py",
+    )
+
+
+def test_main_openapi_allof_with_required_inherited_edge_cases(output_file: Path) -> None:
+    """Test OpenAPI generation with allOf edge cases for branch coverage."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "allof_with_required_inherited_edge_cases.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="allof_with_required_inherited_edge_cases.py",
+    )
+
+
+@LEGACY_BLACK_SKIP
+def test_main_openapi_allof_with_required_inherited_coverage(output_file: Path) -> None:
+    """Test OpenAPI generation with allOf coverage for edge case branches."""
+    with warnings.catch_warnings(record=True) as w:
+        warnings.simplefilter("always")
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "allof_with_required_inherited_coverage.yaml",
+            output_path=output_file,
+            input_file_type="openapi",
+            assert_func=assert_file_content,
+            expected_file="allof_with_required_inherited_coverage.py",
+        )
+        # Verify the warning was raised for $ref combined with constraints
+        assert any("allOf combines $ref" in str(warning.message) for warning in w)
+
+
+def test_main_use_default_kwarg(output_file: Path) -> None:
+    """Test OpenAPI generation with use default kwarg."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "nullable.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        extra_args=["--use-default-kwarg"],
+    )
+
+
+@pytest.mark.parametrize(
+    ("input_", "output"),
+    [
+        (
+            "discriminator.yaml",
+            "general.py",
+        ),
+        (
+            "discriminator_without_mapping.yaml",
+            "without_mapping.py",
+        ),
+    ],
+)
+def test_main_openapi_discriminator(input_: str, output: str, output_file: Path) -> None:
+    """Test OpenAPI generation with discriminator."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / input_,
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_OPENAPI_PATH / "discriminator" / output,
+    )
+
+
+@freeze_time("2023-07-27")
+@pytest.mark.parametrize(
+    ("kind", "option", "expected"),
+    [
+        (
+            "anyOf",
+            "--collapse-root-models",
+            "in_array_collapse_root_models.py",
+        ),
+        (
+            "oneOf",
+            "--collapse-root-models",
+            "in_array_collapse_root_models.py",
+        ),
+        ("anyOf", None, "in_array.py"),
+        ("oneOf", None, "in_array.py"),
+    ],
+)
+def test_main_openapi_discriminator_in_array(kind: str, option: str | None, expected: str, output_file: Path) -> None:
+    """Test OpenAPI generation with discriminator in array."""
+    input_file = f"discriminator_in_array_{kind.lower()}.yaml"
+    extra_args = [option] if option else []
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / input_file,
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file=f"discriminator/{expected}",
+        extra_args=extra_args,
+        transform=lambda s: s.replace(input_file, "discriminator_in_array.yaml"),
+    )
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "default_object",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "pydantic_v2_default_object",
+        ),
+        (
+            "msgspec.Struct",
+            "msgspec_default_object",
+        ),
+    ],
+)
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_default_object(output_model: str, expected_output: str, tmp_path: Path) -> None:
+    """Test OpenAPI generation with default object values."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "default_object.yaml",
+        output_path=tmp_path,
+        expected_directory=EXPECTED_OPENAPI_PATH / expected_output,
+        input_file_type="openapi",
+        extra_args=["--output-model-type", output_model, "--target-python-version", "3.9"],
+    )
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "union_default_object.py",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "pydantic_v2_union_default_object.py",
+        ),
+        (
+            "msgspec.Struct",
+            "msgspec_union_default_object.py",
+        ),
+    ],
+)
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_union_default_object(output_model: str, expected_output: str, output_file: Path) -> None:
+    """Test OpenAPI generation with Union type default object values."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "union_default_object.yaml",
+        output_path=output_file,
+        expected_file=EXPECTED_OPENAPI_PATH / expected_output,
+        input_file_type="openapi",
+        extra_args=[
+            "--output-model-type",
+            output_model,
+            "--target-python-version",
+            "3.9",
+            "--openapi-scopes",
+            "schemas",
+        ],
+    )
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "empty_dict_default.py",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "pydantic_v2_empty_dict_default.py",
+        ),
+        (
+            "msgspec.Struct",
+            "msgspec_empty_dict_default.py",
+        ),
+    ],
+)
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_empty_dict_default(output_model: str, expected_output: str, output_file: Path) -> None:
+    """Test OpenAPI generation with empty dict default values."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "empty_dict_default.yaml",
+        output_path=output_file,
+        expected_file=EXPECTED_OPENAPI_PATH / expected_output,
+        input_file_type="openapi",
+        extra_args=[
+            "--output-model-type",
+            output_model,
+            "--target-python-version",
+            "3.9",
+            "--openapi-scopes",
+            "schemas",
+        ],
+    )
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        (
+            "pydantic.BaseModel",
+            "empty_list_default.py",
+        ),
+        (
+            "pydantic_v2.BaseModel",
+            "pydantic_v2_empty_list_default.py",
+        ),
+    ],
+)
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_empty_list_default(output_model: str, expected_output: str, output_file: Path) -> None:
+    """Test OpenAPI generation with empty list default values."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "empty_list_default.yaml",
+        output_path=output_file,
+        expected_file=EXPECTED_OPENAPI_PATH / expected_output,
+        input_file_type="openapi",
+        extra_args=[
+            "--output-model-type",
+            output_model,
+            "--target-python-version",
+            "3.9",
+            "--openapi-scopes",
+            "schemas",
+        ],
+    )
+
+
+def test_main_dataclass(output_file: Path) -> None:
+    """Test OpenAPI generation with dataclass output."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=["--output-model-type", "dataclasses.dataclass"],
+    )
+
+
+def test_main_dataclass_base_class(output_file: Path) -> None:
+    """Test OpenAPI generation with dataclass base class."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=["--output-model-type", "dataclasses.dataclass", "--base-class", "custom_base.Base"],
+    )
+
+
+def test_main_openapi_reference_same_hierarchy_directory(tmp_path: Path) -> None:
+    """Test OpenAPI generation with reference in same hierarchy directory."""
+    output_file: Path = tmp_path / "output.py"
+    with chdir(OPEN_API_DATA_PATH / "reference_same_hierarchy_directory"):
+        run_main_and_assert(
+            input_path=Path("./public/entities.yaml"),
+            output_path=output_file,
+            input_file_type="openapi",
+            assert_func=assert_file_content,
+            expected_file="reference_same_hierarchy_directory.py",
+        )
+
+
+def test_main_multiple_required_any_of(output_file: Path) -> None:
+    """Test OpenAPI generation with multiple required anyOf."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "multiple_required_any_of.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=["--collapse-root-models"],
+    )
+
+
+def test_main_openapi_max_min(output_file: Path) -> None:
+    """Test OpenAPI generation with max and min constraints."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "max_min_number.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="max_min_number.py",
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--use-operation-id-as-name"],
+    input_schema="openapi/api.yaml",
+    cli_args=["--use-operation-id-as-name", "--openapi-scopes", "paths", "schemas", "parameters"],
+    golden_output="openapi/use_operation_id_as_name.py",
+)
+def test_main_openapi_use_operation_id_as_name(output_file: Path) -> None:
+    """Use OpenAPI operationId as the generated function/class name.
+
+    The `--use-operation-id-as-name` flag names path- and parameter-derived models
+    after each operation's operationId instead of the endpoint path, so every
+    operation in the spec must declare an operationId (see the error test below).
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="use_operation_id_as_name.py",
+        extra_args=["--use-operation-id-as-name", "--openapi-scopes", "paths", "schemas", "parameters"],
+    )
+
+
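+# Hedged sketch of the naming effect (the model name is invented for
+# illustration, not taken from api.yaml): an operation declaring
+# `operationId: listPets` yields models named from the operationId rather
+# than from the URL path:
+def _sketch_operation_id_naming() -> None:
+    from pydantic import BaseModel
+
+    class ListPetsResponse(BaseModel):  # from "listPets", not from "GET /pets"
+        ...
+
+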
+def test_main_openapi_use_operation_id_as_name_not_found_operation_id(
+    capsys: pytest.CaptureFixture, output_file: Path
+) -> None:
+    """Test OpenAPI generation with operation ID as name when ID not found."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "body_and_parameters.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        expected_exit=Exit.ERROR,
+        extra_args=["--use-operation-id-as-name", "--openapi-scopes", "paths", "schemas", "parameters"],
+        capsys=capsys,
+        expected_stderr_contains="All operations must have an operationId when --use_operation_id_as_name is set.",
+    )
+
+
+def test_main_unsorted_optional_fields(output_file: Path) -> None:
+    """Test OpenAPI generation with unsorted optional fields."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "unsorted_optional_fields.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=["--output-model-type", "dataclasses.dataclass"],
+    )
+
+
+def test_main_typed_dict(output_file: Path) -> None:
+    """Test OpenAPI generation with TypedDict output."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=["--output-model-type", "typing.TypedDict"],
+    )
+
+
+def test_main_typed_dict_py(min_version: str, output_file: Path) -> None:
+    """Test OpenAPI generation with TypedDict for specific Python version."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=["--output-model-type", "typing.TypedDict", "--target-python-version", min_version],
+    )
+
+
+@pytest.mark.skipif(
+    version.parse(black.__version__) < version.parse("23.3.0"),
+    reason="Require Black version 23.3.0 or later ",
+)
+def test_main_modular_typed_dict(output_dir: Path) -> None:
+    """Test main function on modular file."""
+    with freeze_time(TIMESTAMP):
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "modular.yaml",
+            output_path=output_dir,
+            expected_directory=EXPECTED_OPENAPI_PATH / "modular_typed_dict",
+            extra_args=["--output-model-type", "typing.TypedDict", "--target-python-version", "3.11"],
+        )
+
+
+@pytest.mark.skipif(
+    version.parse(black.__version__) < version.parse("23.3.0"),
+    reason="Require Black version 23.3.0 or later ",
+)
+def test_main_typed_dict_nullable(output_file: Path) -> None:
+    """Test OpenAPI generation with nullable TypedDict."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "nullable.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=["--output-model-type", "typing.TypedDict", "--target-python-version", "3.11"],
+    )
+
+
+@pytest.mark.skipif(
+    version.parse(black.__version__) < version.parse("23.3.0"),
+    reason="Require Black version 23.3.0 or later ",
+)
+def test_main_msgspec_nullable(output_file: Path) -> None:
+    """Test OpenAPI generation with nullable msgspec.Struct."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "nullable.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="msgspec_nullable.py",
+        extra_args=["--output-model-type", "msgspec.Struct", "--target-python-version", "3.11"],
+    )
+
+
+@pytest.mark.skipif(
+    version.parse(black.__version__) < version.parse("23.3.0"),
+    reason="Require Black version 23.3.0 or later ",
+)
+def test_main_typed_dict_nullable_strict_nullable(output_file: Path) -> None:
+    """Test OpenAPI generation with strict nullable TypedDict."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "nullable.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=["--output-model-type", "typing.TypedDict", "--target-python-version", "3.11", "--strict-nullable"],
+    )
+
+
+@pytest.mark.benchmark
+def test_main_openapi_nullable_31(output_file: Path) -> None:
+    """Test OpenAPI 3.1 generation with nullable types."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "nullable_31.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="nullable_31.py",
+        extra_args=["--output-model-type", "pydantic_v2.BaseModel", "--strip-default-none", "--use-union-operator"],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--custom-file-header-path"],
+    input_schema="openapi/api.yaml",
+    cli_args=["--custom-file-header-path", "custom_file_header.txt"],
+    golden_output="openapi/custom_file_header.py",
+)
+def test_main_custom_file_header_path(output_file: Path) -> None:
+    """Add custom header content from file to generated code.
+
+    The `--custom-file-header-path` flag allows you to specify a file containing
+    custom header content (like copyright notices, linting directives, or module docstrings)
+    to be inserted at the top of generated Python files.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="custom_file_header.py",
+        extra_args=["--custom-file-header-path", str(DATA_PATH / "custom_file_header.txt")],
+    )
+
+
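+# Hedged sketch of the header handling (illustrative strings, not the fixture
+# files): the header file is copied verbatim to the top of each generated
+# module, and `from __future__ import annotations` lands after any header
+# docstring but before other imports (see the placement tests below):
+_SKETCH_CUSTOM_HEADER = '"""Example module."""\n# (c) Example header'
+_SKETCH_GENERATED_TOP = (
+    '"""Example module."""\n# (c) Example header\nfrom __future__ import annotations'
+)
+
+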
+def test_main_custom_file_header_duplicate_options(capsys: pytest.CaptureFixture, output_file: Path) -> None:
+    """Test OpenAPI generation with duplicate custom file header options."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        expected_exit=Exit.ERROR,
+        extra_args=[
+            "--custom-file-header-path",
+            str(DATA_PATH / "custom_file_header.txt"),
+            "--custom-file-header",
+            "abc",
+        ],
+        capsys=capsys,
+        expected_stderr_contains="`--custom_file_header_path` can not be used with `--custom_file_header`.",
+    )
+
+
+def test_main_custom_file_header_with_docstring(output_file: Path) -> None:
+    """Test future import placement after docstring in custom header."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="custom_file_header_with_docstring.py",
+        extra_args=["--custom-file-header-path", str(DATA_PATH / "custom_file_header_with_docstring.txt")],
+    )
+
+
+def test_main_custom_file_header_with_import(output_file: Path) -> None:
+    """Test future import placement before existing imports in custom header."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="custom_file_header_with_import.py",
+        extra_args=["--custom-file-header-path", str(DATA_PATH / "custom_file_header_with_import.txt")],
+    )
+
+
+def test_main_custom_file_header_with_docstring_and_import(output_file: Path) -> None:
+    """Test future import placement with docstring and imports in custom header."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="custom_file_header_with_docstring_and_import.py",
+        extra_args=["--custom-file-header-path", str(DATA_PATH / "custom_file_header_with_docstring_and_import.txt")],
+    )
+
+
+def test_main_custom_file_header_without_future_imports(output_file: Path) -> None:
+    """Test custom header with --disable-future-imports option."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="custom_file_header_no_future.py",
+        extra_args=[
+            "--custom-file-header-path",
+            str(DATA_PATH / "custom_file_header.txt"),
+            "--disable-future-imports",
+        ],
+    )
+
+
+def test_main_custom_file_header_empty(output_file: Path) -> None:
+    """Test empty custom header file."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="custom_file_header_empty.py",
+        extra_args=["--custom-file-header-path", str(DATA_PATH / "custom_file_header_empty.txt")],
+    )
+
+
+def test_main_custom_file_header_invalid_syntax(output_file: Path) -> None:
+    """Test custom header with invalid Python syntax."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="custom_file_header_invalid_syntax.py",
+        extra_args=["--custom-file-header-path", str(DATA_PATH / "custom_file_header_invalid_syntax.txt")],
+        skip_code_validation=True,
+    )
+
+
+def test_main_custom_file_header_comments_only(output_file: Path) -> None:
+    """Test custom header with only comments (no statements)."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="custom_file_header_comments_only.py",
+        extra_args=["--custom-file-header-path", str(DATA_PATH / "custom_file_header_comments_only.txt")],
+    )
+
+
+def test_main_pydantic_v2(output_file: Path) -> None:
+    """Test OpenAPI generation with Pydantic v2 output."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        extra_args=["--output-model-type", "pydantic_v2.BaseModel"],
+    )
+
+
+def test_main_openapi_custom_id_pydantic_v2(output_file: Path) -> None:
+    """Test OpenAPI generation with custom ID for Pydantic v2."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "custom_id.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="custom_id_pydantic_v2.py",
+        extra_args=["--output-model-type", "pydantic_v2.BaseModel"],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--use-serialize-as-any"],
+    input_schema="openapi/serialize_as_any.yaml",
+    cli_args=["--use-serialize-as-any"],
+    golden_output="openapi/serialize_as_any_pydantic_v2.py",
+)
+def test_main_openapi_serialize_as_any_pydantic_v2(output_file: Path) -> None:
+    """Wrap fields with subtypes in Pydantic's SerializeAsAny.
+
+    The `--use-serialize-as-any` flag applies Pydantic v2's SerializeAsAny wrapper
+    to fields that have subtype relationships, ensuring proper serialization of
+    polymorphic types and inheritance hierarchies.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "serialize_as_any.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="serialize_as_any_pydantic_v2.py",
+        extra_args=["--output-model-type", "pydantic_v2.BaseModel", "--use-serialize-as-any"],
+    )
+
+
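+# Hedged sketch of the generated shape (the golden file above is
+# authoritative; Pet and Owner are invented): pydantic v2's SerializeAsAny
+# makes a field serialize by its runtime type, so subclass instances keep
+# their extra fields during model_dump():
+def _sketch_serialize_as_any() -> None:
+    from pydantic import BaseModel, SerializeAsAny
+
+    class Pet(BaseModel):
+        name: str
+
+    class Owner(BaseModel):
+        pet: SerializeAsAny[Pet]  # a Dog subtype is dumped as a Dog, not a Pet
+
+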
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_all_of_with_relative_ref(output_file: Path) -> None:
+    """Test OpenAPI generation with allOf and relative reference."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "all_of_with_relative_ref" / "openapi.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="all_of_with_relative_ref.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--keep-model-order",
+            "--collapse-root-models",
+            "--field-constraints",
+            "--use-title-as-name",
+            "--field-include-all-keys",
+            "--use-field-description",
+        ],
+    )
+
+
+def test_main_openapi_msgspec_struct(min_version: str, output_file: Path) -> None:
+    """Test OpenAPI generation with msgspec Struct output."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="msgspec_struct.py",
+        extra_args=["--target-python-version", min_version, "--output-model-type", "msgspec.Struct"],
+    )
+
+
+def test_main_openapi_msgspec_struct_snake_case(min_version: str, output_file: Path) -> None:
+    """Test OpenAPI generation with msgspec Struct and snake case."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api_ordered_required_fields.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="msgspec_struct_snake_case.py",
+        extra_args=[
+            "--target-python-version",
+            min_version,
+            "--snake-case-field",
+            "--output-model-type",
+            "msgspec.Struct",
+        ],
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+@MSGSPEC_LEGACY_BLACK_SKIP
+def test_main_openapi_msgspec_use_annotated_with_field_constraints(output_file: Path) -> None:
+    """Test OpenAPI generation with msgspec using Annotated and field constraints."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api_constrained.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="msgspec_use_annotated_with_field_constraints.py",
+        extra_args=["--field-constraints", "--target-python-version", "3.9", "--output-model-type", "msgspec.Struct"],
+    )
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_file"),
+    [
+        ("pydantic_v2.BaseModel", "discriminator/enum_one_literal_as_default.py"),
+        ("dataclasses.dataclass", "discriminator/dataclass_enum_one_literal_as_default.py"),
+    ],
+)
+@pytest.mark.cli_doc(
+    options=["--use-one-literal-as-default"],
+    input_schema="openapi/discriminator_enum.yaml",
+    cli_args=["--use-one-literal-as-default"],
+    model_outputs={
+        "pydantic_v2": "openapi/discriminator/enum_one_literal_as_default.py",
+        "dataclass": "openapi/discriminator/dataclass_enum_one_literal_as_default.py",
+    },
+)
+def test_main_openapi_discriminator_one_literal_as_default(
+    output_model: str, expected_file: str, output_file: Path
+) -> None:
+    """Set default value when only one literal is valid for a discriminator field.
+
+    The `--use-one-literal-as-default` flag sets default values for discriminator
+    fields when only one literal value is valid, reducing boilerplate in model
+    instantiation.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "discriminator_enum.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_OPENAPI_PATH / expected_file,
+        extra_args=["--output-model-type", output_model, "--use-one-literal-as-default"],
+    )
+
+
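+# Hedged sketch (pydantic v2 flavor; the golden files above are
+# authoritative, Cat is invented): when the discriminator leaves exactly one
+# valid literal for a subtype, the flag emits that literal as the default:
+def _sketch_one_literal_as_default() -> None:
+    from typing import Literal
+
+    from pydantic import BaseModel
+
+    class Cat(BaseModel):
+        pet_type: Literal["cat"] = "cat"  # the single valid literal
+
+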
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_discriminator_one_literal_as_default_dataclass_py310(output_file: Path) -> None:
+    """Test OpenAPI generation with discriminator one literal as default for dataclass with Python 3.10+."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "discriminator_enum.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_OPENAPI_PATH / "discriminator" / "dataclass_enum_one_literal_as_default_py310.py",
+        extra_args=[
+            "--output-model-type",
+            "dataclasses.dataclass",
+            "--use-one-literal-as-default",
+            "--target-python-version",
+            "3.10",
+        ],
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_discriminator_one_literal_as_default_dataclass_py39_warning(output_file: Path) -> None:
+    """Test that Python 3.9 emits warning for dataclass field ordering conflict."""
+    with pytest.warns(UserWarning, match=r"Dataclass .* has a field ordering conflict due to inheritance"):
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "discriminator_enum.yaml",
+            output_path=output_file,
+            input_file_type="openapi",
+            assert_func=assert_file_content,
+            expected_file=EXPECTED_OPENAPI_PATH / "discriminator" / "dataclass_enum_one_literal_as_default.py",
+            extra_args=[
+                "--output-model-type",
+                "dataclasses.dataclass",
+                "--use-one-literal-as-default",
+                "--target-python-version",
+                "3.9",
+            ],
+            skip_code_validation=True,
+        )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_dataclass_inheritance_parent_default(output_file: Path) -> None:
+    """Test dataclass field ordering fix when parent has default field."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "dataclass_inheritance_field_ordering.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_OPENAPI_PATH / "dataclass_inheritance_field_ordering_py310.py",
+        extra_args=[
+            "--output-model-type",
+            "dataclasses.dataclass",
+            "--target-python-version",
+            "3.10",
+        ],
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_keyword_only_dataclass(output_file: Path) -> None:
+    """Test OpenAPI generation with keyword-only dataclass."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "inheritance.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="dataclass_keyword_only.py",
+        extra_args=[
+            "--output-model-type",
+            "dataclasses.dataclass",
+            "--keyword-only",
+            "--target-python-version",
+            "3.10",
+        ],
+    )
+
+
+def test_main_openapi_keyword_only_dataclass_with_python_3_9(capsys: pytest.CaptureFixture, output_file: Path) -> None:
+    """Test OpenAPI generation with keyword-only dataclass for Python 3.9."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "inheritance.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        expected_exit=Exit.ERROR,
+        extra_args=["--output-model-type", "dataclasses.dataclass", "--keyword-only", "--target-python-version", "3.9"],
+        capsys=capsys,
+        expected_stderr_contains="`--keyword-only` requires `--target-python-version` 3.10 or higher.",
+    )
+
+
+def test_main_openapi_dataclass_with_naive_datetime(capsys: pytest.CaptureFixture, output_file: Path) -> None:
+    """Test OpenAPI generation with dataclass using naive datetime."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "inheritance.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        expected_exit=Exit.ERROR,
+        extra_args=[
+            "--output-model-type",
+            "dataclasses.dataclass",
+            "--output-datetime-class",
+            "NaiveDatetime",
+        ],
+        capsys=capsys,
+        expected_stderr_contains=(
+            '`--output-datetime-class` only allows "datetime" for `--output-model-type` dataclasses.dataclass'
+        ),
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_keyword_only_msgspec(min_version: str, output_file: Path) -> None:
+    """Test OpenAPI generation with keyword-only msgspec."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "inheritance.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="msgspec_keyword_only.py",
+        extra_args=["--output-model-type", "msgspec.Struct", "--keyword-only", "--target-python-version", min_version],
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_openapi_keyword_only_msgspec_with_extra_data(min_version: str, output_file: Path) -> None:
+    """Test OpenAPI generation with keyword-only msgspec and extra data."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "inheritance.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="msgspec_keyword_only_omit_defaults.py",
+        extra_args=[
+            "--output-model-type",
+            "msgspec.Struct",
+            "--keyword-only",
+            "--target-python-version",
+            min_version,
+            "--extra-template-data",
+            str(OPEN_API_DATA_PATH / "extra_data_msgspec.json"),
+        ],
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+def test_main_generate_openapi_keyword_only_msgspec_with_extra_data(tmp_path: Path) -> None:
+    """Test OpenAPI generation with keyword-only msgspec using generate function."""
+    extra_data = json.loads((OPEN_API_DATA_PATH / "extra_data_msgspec.json").read_text())
+    output_file: Path = tmp_path / "output.py"
+    generate(
+        input_=OPEN_API_DATA_PATH / "inheritance.yaml",
+        output=output_file,
+        input_file_type=InputFileType.OpenAPI,
+        output_model_type=DataModelType.MsgspecStruct,
+        keyword_only=True,
+        target_python_version=PythonVersionMin,
+        extra_template_data=defaultdict(dict, extra_data),
+        # Following values are defaults in the CLI, but not in the API
+        openapi_scopes=[OpenAPIScope.Schemas],
+        # Following values are implied by `msgspec.Struct` in the CLI
+        use_annotated=True,
+        field_constraints=True,
+    )
+    assert_file_content(output_file, "msgspec_keyword_only_omit_defaults.py")
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] < "22",
+    reason="Installed black doesn't support Python version 3.10",
+)
+@MSGSPEC_LEGACY_BLACK_SKIP
+def test_main_openapi_msgspec_use_union_operator(output_file: Path) -> None:
+    """Test msgspec Struct generation with union operator (Python 3.10+)."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "nullable.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="msgspec_use_union_operator.py",
+        extra_args=[
+            "--output-model-type",
+            "msgspec.Struct",
+            "--use-union-operator",
+            "--target-python-version",
+            "3.10",
+        ],
+    )
+
+
+@MSGSPEC_LEGACY_BLACK_SKIP
+def test_main_openapi_msgspec_anyof(min_version: str, output_file: Path) -> None:
+    """Test msgspec Struct generation with anyOf fields."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "anyof.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="msgspec_anyof.py",
+        extra_args=[
+            "--output-model-type",
+            "msgspec.Struct",
+            "--target-python-version",
+            min_version,
+        ],
+    )
+
+
+@LEGACY_BLACK_SKIP
+def test_main_openapi_msgspec_oneof_with_null(output_file: Path) -> None:
+    """Test msgspec Struct generation with oneOf containing null type."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "msgspec_oneof_with_null.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="msgspec_oneof_with_null.py",
+        extra_args=[
+            "--output-model-type",
+            "msgspec.Struct",
+        ],
+    )
+
+
+@LEGACY_BLACK_SKIP
+def test_main_openapi_msgspec_oneof_with_null_union_operator(output_file: Path) -> None:
+    """Test msgspec Struct generation with oneOf containing null type using union operator."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "msgspec_oneof_with_null.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="msgspec_oneof_with_null_union_operator.py",
+        extra_args=[
+            "--output-model-type",
+            "msgspec.Struct",
+            "--use-union-operator",
+        ],
+    )
+
+
+def test_main_openapi_referenced_default(output_file: Path) -> None:
+    """Test OpenAPI generation with referenced default values."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "referenced_default.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="referenced_default.py",
+        extra_args=["--output-model-type", "pydantic_v2.BaseModel"],
+    )
+
+
+def test_main_openapi_referenced_default_use_annotated(output_file: Path) -> None:
+    """Test OpenAPI generation with referenced default values using --use-annotated."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "referenced_default.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="referenced_default_use_annotated.py",
+        extra_args=["--output-model-type", "pydantic_v2.BaseModel", "--use-annotated"],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--parent-scoped-naming"],
+    input_schema="openapi/duplicate_models2.yaml",
+    cli_args=[
+        "--parent-scoped-naming",
+        "--use-operation-id-as-name",
+        "--openapi-scopes",
+        "paths",
+        "schemas",
+        "parameters",
+    ],
+    golden_output="openapi/duplicate_models2.py",
+)
+def test_duplicate_models(output_file: Path) -> None:
+    """Namespace models by their parent scope to avoid naming conflicts.
+
+    The `--parent-scoped-naming` flag prefixes model names with their parent scope
+    (operation/path/parameter) to prevent name collisions when the same model name
+    appears in different contexts within an OpenAPI specification.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "duplicate_models2.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="duplicate_models2.py",
+        extra_args=[
+            "--use-operation-id-as-name",
+            "--openapi-scopes",
+            "paths",
+            "schemas",
+            "parameters",
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--parent-scoped-naming",
+        ],
+    )
+
+
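+# Hedged sketch of the renaming (model names invented, not taken from
+# duplicate_models2.yaml): two inline `Item` schemas under different
+# operations no longer collide because each name is prefixed with its parent
+# scope:
+def _sketch_parent_scoped_naming() -> None:
+    from pydantic import BaseModel
+
+    class GetUsersItem(BaseModel):  # inline Item under a getUsers operation
+        ...
+
+    class GetOrdersItem(BaseModel):  # inline Item under a getOrders operation
+        ...
+
+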
+def test_main_openapi_shadowed_imports(output_file: Path) -> None:
+    """Test OpenAPI generation with shadowed imports."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "shadowed_imports.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="shadowed_imports.py",
+        extra_args=["--output-model-type", "pydantic_v2.BaseModel"],
+    )
+
+
+def test_main_openapi_extra_fields_forbid(output_file: Path) -> None:
+    """Test OpenAPI generation with extra fields forbidden."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "additional_properties.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="additional_properties.py",
+        extra_args=["--extra-fields", "forbid"],
+    )
+
+
+def test_main_openapi_same_name_objects(output_file: Path) -> None:
+    """Test OpenAPI generation with same name objects."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "same_name_objects.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="same_name_objects.py",
+    )
+
+
+def test_main_openapi_type_alias(output_file: Path) -> None:
+    """Test that TypeAliasType is generated for OpenAPI schemas for Python 3.9-3.11."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "type_alias.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="type_alias.py",
+        extra_args=["--use-type-alias"],
+    )
+
+
+@pytest.mark.skipif(
+    int(black.__version__.split(".")[0]) < 23,
+    reason="Installed black doesn't support the new 'type' statement",
+)
+def test_main_openapi_type_alias_py312(output_file: Path) -> None:
+    """Test that type statement syntax is generated for OpenAPI schemas with Python 3.12+ and Pydantic v2."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "type_alias.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="type_alias_py312.py",
+        extra_args=[
+            "--use-type-alias",
+            "--target-python-version",
+            "3.12",
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+        ],
+    )
+
+
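+# Hedged sketch contrasting the two alias styles exercised above (PetId is
+# invented; assumes typing_extensions is available, as pydantic depends on
+# it). On 3.9-3.11 the generator emits TypeAliasType; on 3.12+ it emits the
+# `type` statement, shown here as a string so this module still parses on 3.9:
+def _sketch_type_alias_styles() -> None:
+    from typing import Union
+
+    from typing_extensions import TypeAliasType
+
+    pet_id = TypeAliasType("PetId", Union[int, str])  # 3.9-3.11 style
+    py312_style = "type PetId = int | str"  # 3.12+ style
+    assert pet_id.__name__ == "PetId" and py312_style
+
+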
+@pytest.mark.skipif(
+    int(black.__version__.split(".")[0]) < 23,
+    reason="Installed black doesn't support the target python version",
+)
+def test_main_openapi_type_alias_mutual_recursive_py311(output_file: Path) -> None:  # pragma: no cover
+    """Test mutual recursive type aliases render with quoted forward refs on Python 3.11."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "type_alias_mutual_recursive.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="type_alias_mutual_recursive.py",
+        extra_args=[
+            "--use-type-alias",
+            "--target-python-version",
+            "3.11",
+            "--output-model-type",
+            "pydantic.BaseModel",
+        ],
+    )
+
+
+@pytest.mark.skipif(
+    int(black.__version__.split(".")[0]) < 23,
+    reason="Installed black doesn't support the target python version",
+)
+def test_main_openapi_type_alias_mutual_recursive_typealiastype_py311(output_file: Path) -> None:  # pragma: no cover
+    """Test mutual recursive type aliases render with quoted forward refs for TypeAliasType on Python 3.11."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "type_alias_mutual_recursive.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="msgspec_mutual_type_alias.py",
+        extra_args=[
+            "--use-type-alias",
+            "--target-python-version",
+            "3.11",
+            "--output-model-type",
+            "msgspec.Struct",
+        ],
+    )
+
+
+@pytest.mark.skipif(
+    int(black.__version__.split(".")[0]) < 23,
+    reason="Installed black doesn't support the target python version",
+)
+def test_main_openapi_type_alias_recursive_py311(output_file: Path) -> None:  # pragma: no cover
+    """Test recursive type aliases render with quoted self references on Python 3.11."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "type_alias_recursive.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="type_alias_recursive_py311.py",
+        extra_args=[
+            "--use-type-alias",
+            "--target-python-version",
+            "3.11",
+            "--output-model-type",
+            "pydantic.BaseModel",
+        ],
+    )
+
+
+@pytest.mark.skipif(
+    int(black.__version__.split(".")[0]) < 23,
+    reason="Installed black doesn't support the new 'type' statement",
+)
+def test_main_openapi_type_alias_recursive_py312(output_file: Path) -> None:
+    """
+    Test that handling of type aliases work as expected for recursive types.
+
+    NOTE: applied to python 3.12--14
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "type_alias_recursive.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="type_alias_recursive_py312.py",
+        extra_args=[
+            "--use-type-alias",
+            "--target-python-version",
+            "3.12",
+            "--use-standard-collections",
+            "--use-union-operator",
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+        ],
+    )
+
+
+def test_main_openapi_type_alias_recursive(output_file: Path) -> None:
+    """Test recursive type aliases with proper forward reference quoting."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "type_alias_recursive.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="type_alias_recursive.py",
+        extra_args=["--use-type-alias"],
+    )
+
+
+def test_main_openapi_type_alias_cross_module_collision_a(output_file: Path) -> None:
+    """Test TypeAlias generation for module A in cross-module collision scenario."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "type_alias_cross_module_collision" / "a.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="type_alias_cross_module_collision_a.py",
+        extra_args=[
+            "--use-type-alias",
+            "--target-python-version",
+            "3.10",
+        ],
+    )
+
+
+def test_main_openapi_type_alias_cross_module_collision_b(output_file: Path) -> None:
+    """Test TypeAlias generation for module B with self-referential forward reference."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "type_alias_cross_module_collision" / "b.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="type_alias_cross_module_collision_b.py",
+        extra_args=[
+            "--use-type-alias",
+            "--target-python-version",
+            "3.10",
+        ],
+    )
+
+
+def test_main_openapi_type_alias_forward_ref_multiple(output_file: Path) -> None:
+    """Test TypeAlias with multiple forward references that require quoting."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "type_alias_forward_ref_multiple.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="type_alias_forward_ref_multiple.py",
+        extra_args=[
+            "--use-type-alias",
+            "--target-python-version",
+            "3.10",
+        ],
+    )
+
+
+def test_main_openapi_byte_format(output_file: Path) -> None:
+    """Test OpenAPI generation with byte format."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "byte_format.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="byte_format.py",
+        extra_args=["--output-model-type", "pydantic_v2.BaseModel"],
+    )
+
+
+def test_main_openapi_unquoted_null(output_file: Path) -> None:
+    """Test OpenAPI generation with unquoted null values."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "unquoted_null.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="unquoted_null.py",
+        extra_args=["--output-model-type", "pydantic_v2.BaseModel"],
+    )
+
+
+def test_main_openapi_webhooks(output_file: Path) -> None:
+    """Test OpenAPI generation with webhooks scope."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "webhooks.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        extra_args=["--openapi-scopes", "schemas", "webhooks"],
+    )
+
+
+def test_main_openapi_non_operations_and_security(output_file: Path) -> None:
+    """Test OpenAPI generation with non-operation fields and security inheritance."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "non_operations_and_security.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        extra_args=["--openapi-scopes", "schemas", "paths", "webhooks"],
+    )
+
+
+def test_main_openapi_webhooks_with_parameters(output_file: Path) -> None:
+    """Test OpenAPI generation with webhook-level and operation-level parameters."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "webhooks_with_parameters.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        extra_args=["--openapi-scopes", "schemas", "webhooks", "parameters"],
+    )
+
+
+def test_webhooks_ref_with_external_schema(output_file: Path) -> None:
+    """Test OpenAPI generation with $ref to external webhook file containing relative schema refs."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "webhooks_ref_with_external_schema" / "openapi.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="webhooks_ref_with_external_schema.py",
+        extra_args=["--openapi-scopes", "schemas", "webhooks"],
+    )
+
+
+def test_main_openapi_external_ref_with_transitive_local_ref(output_file: Path) -> None:
+    """Test OpenAPI generation with external ref that has transitive local refs."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "external_ref_with_transitive_local_ref" / "openapi.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="external_ref_with_transitive_local_ref/output.py",
+        extra_args=["--output-model-type", "pydantic_v2.BaseModel"],
+    )
+
+
+def test_main_openapi_namespace_subns_ref(output_dir: Path) -> None:
+    """Test OpenAPI generation with namespaced schema referencing subnamespace.
+
+    Regression test for issue #2366: When a schema with a dot-delimited name
+    (e.g., ns.wrapper) references another schema in a subnamespace
+    (e.g., ns.subns.item), the generated import should be "from . import subns"
+    (same package) instead of "from .. import subns" (parent package).
+    """
+    with freeze_time(TIMESTAMP):
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "namespace_subns_ref.json",
+            output_path=output_dir,
+            expected_directory=EXPECTED_OPENAPI_PATH / "namespace_subns_ref",
+        )
+
+
+def test_main_openapi_read_only_write_only_default(output_file: Path) -> None:
+    """Test readOnly/writeOnly default: base model only."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "read_only_write_only.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="read_only_write_only_default.py",
+        extra_args=["--output-model-type", "pydantic_v2.BaseModel"],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--read-only-write-only-model-type"],
+    input_schema="openapi/read_only_write_only.yaml",
+    cli_args=["--output-model-type", "pydantic_v2.BaseModel", "--read-only-write-only-model-type", "request-response"],
+    golden_output="openapi/read_only_write_only_request_response.py",
+)
+def test_main_openapi_read_only_write_only_request_response(output_file: Path) -> None:
+    """Generate separate request and response models for readOnly/writeOnly fields.
+
+    The `--read-only-write-only-model-type` option controls how models with readOnly or writeOnly
+    properties are generated. The 'request-response' mode creates separate Request and Response
+    variants for each schema that contains readOnly or writeOnly fields, allowing proper type
+    validation for API requests and responses without a shared base model.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "read_only_write_only.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="read_only_write_only_request_response.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--read-only-write-only-model-type",
+            "request-response",
+        ],
+    )
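+
+# For illustration only (not part of the test module): a hedged sketch of what
+# request-response mode might emit for a schema with readOnly/writeOnly properties.
+# Class and field names here are assumptions, not the exact golden output:
+#
+#     class UserRequest(BaseModel):
+#         name: str
+#         password: str  # writeOnly: accepted in requests, omitted from responses
+#
+#     class UserResponse(BaseModel):
+#         name: str
+#         id: int  # readOnly: returned in responses, not accepted in requests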
+
+
+def test_main_openapi_read_only_write_only_all(output_file: Path) -> None:
+    """Test readOnly/writeOnly all: Base + Request + Response models."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "read_only_write_only.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="read_only_write_only_all.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--read-only-write-only-model-type",
+            "all",
+        ],
+    )
+
+
+def test_main_openapi_read_only_write_only_allof(output_file: Path) -> None:
+    """Test readOnly/writeOnly with allOf inheritance."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "read_only_write_only_allof.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="read_only_write_only_allof.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--read-only-write-only-model-type",
+            "all",
+        ],
+    )
+
+
+def test_main_openapi_read_only_write_only_allof_request_response(output_file: Path) -> None:
+    """Test readOnly/writeOnly with allOf using request-response mode (no base model)."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "read_only_write_only_allof.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="read_only_write_only_allof_request_response.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--read-only-write-only-model-type",
+            "request-response",
+        ],
+    )
+
+
+def test_main_openapi_read_only_write_only_collision(output_file: Path) -> None:
+    """Test readOnly/writeOnly with name collision (UserRequest already exists)."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "read_only_write_only_collision.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="read_only_write_only_collision.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--read-only-write-only-model-type",
+            "all",
+        ],
+    )
+
+
+def test_main_openapi_read_only_write_only_ref(output_file: Path) -> None:
+    """Test readOnly/writeOnly on $ref target schema."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "read_only_write_only_ref.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="read_only_write_only_ref.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--read-only-write-only-model-type",
+            "all",
+        ],
+    )
+
+
+def test_main_openapi_read_only_write_only_double_collision(output_file: Path) -> None:
+    """Test readOnly/writeOnly with double collision (UserRequest and UserRequestModel exist)."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "read_only_write_only_double_collision.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="read_only_write_only_double_collision.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--read-only-write-only-model-type",
+            "all",
+        ],
+    )
+
+
+def test_main_openapi_read_only_write_only_nested_allof(output_file: Path) -> None:
+    """Test readOnly/writeOnly with nested allOf inheritance."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "read_only_write_only_nested_allof.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="read_only_write_only_nested_allof.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--read-only-write-only-model-type",
+            "all",
+        ],
+    )
+
+
+def test_main_openapi_read_only_write_only_union(output_file: Path) -> None:
+    """Test readOnly/writeOnly with Union type field."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "read_only_write_only_union.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="read_only_write_only_union.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--read-only-write-only-model-type",
+            "all",
+        ],
+    )
+
+
+def test_main_openapi_read_only_write_only_url_ref(mocker: MockerFixture, output_file: Path) -> None:
+    """Test readOnly/writeOnly with URL $ref to external schema."""
+    remote_schema = (OPEN_API_DATA_PATH / "read_only_write_only_url_ref_remote.yaml").read_text()
+    mock_response = mocker.Mock()
+    mock_response.text = remote_schema
+
+    mocker.patch("httpx.get", return_value=mock_response)
+
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "read_only_write_only_url_ref.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="read_only_write_only_url_ref.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--read-only-write-only-model-type",
+            "all",
+        ],
+    )
+
+
+def test_main_openapi_read_only_write_only_allof_url_ref(mocker: MockerFixture, output_file: Path) -> None:
+    """Test readOnly/writeOnly with allOf that references external URL schema."""
+    remote_schema = (OPEN_API_DATA_PATH / "read_only_write_only_allof_url_ref_remote.yaml").read_text()
+    mock_response = mocker.Mock()
+    mock_response.text = remote_schema
+
+    mocker.patch("httpx.get", return_value=mock_response)
+
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "read_only_write_only_allof_url_ref.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="read_only_write_only_allof_url_ref.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--read-only-write-only-model-type",
+            "all",
+        ],
+    )
+
+
+def test_main_openapi_read_only_write_only_allof_order(output_file: Path) -> None:
+    """Test readOnly/writeOnly with allOf where child is listed before parent in schema."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "read_only_write_only_allof_order.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="read_only_write_only_allof_order.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--read-only-write-only-model-type",
+            "all",
+        ],
+    )
+
+
+def test_main_openapi_read_only_write_only_nested_allof_order(output_file: Path) -> None:
+    """Test readOnly/writeOnly with nested allOf where models are listed in reverse order."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "read_only_write_only_nested_allof_order.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="read_only_write_only_nested_allof_order.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--read-only-write-only-model-type",
+            "all",
+        ],
+    )
+
+
+def test_main_openapi_read_only_write_only_allof_required_only(output_file: Path) -> None:
+    """Test readOnly/writeOnly with allOf containing item with only 'required' (no ref, no properties)."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "read_only_write_only_allof_required_only.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="read_only_write_only_allof_required_only.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--read-only-write-only-model-type",
+            "all",
+        ],
+    )
+
+
+def test_main_openapi_read_only_write_only_mixed(output_file: Path) -> None:
+    """Test request-response mode generates base models for schemas without readOnly/writeOnly."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "read_only_write_only_mixed.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="read_only_write_only_mixed.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--read-only-write-only-model-type",
+            "request-response",
+        ],
+    )
+
+
+def test_main_openapi_read_only_write_only_anyof(output_file: Path) -> None:
+    """Test readOnly/writeOnly detection in anyOf and oneOf compositions."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "read_only_write_only_anyof.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="read_only_write_only_anyof.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--read-only-write-only-model-type",
+            "all",
+        ],
+    )
+
+
+def test_main_openapi_read_only_write_only_duplicate_allof_ref(output_file: Path) -> None:
+    """Test readOnly/writeOnly with duplicate $ref in allOf."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "read_only_write_only_duplicate_allof_ref.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="read_only_write_only_duplicate_allof_ref.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--read-only-write-only-model-type",
+            "all",
+        ],
+    )
+
+
+def test_main_openapi_read_only_write_only_ref_with_desc(output_file: Path) -> None:
+    """Test readOnly/writeOnly on $ref with description (JsonSchemaObject with ref)."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "read_only_write_only_ref_with_desc.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="read_only_write_only_ref_with_desc.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--read-only-write-only-model-type",
+            "all",
+        ],
+    )
+
+
+def test_main_openapi_read_only_write_only_shared_base_ref(output_file: Path) -> None:
+    """Test readOnly/writeOnly with diamond inheritance (shared base via multiple paths)."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "read_only_write_only_shared_base_ref.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="read_only_write_only_shared_base_ref.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--read-only-write-only-model-type",
+            "all",
+        ],
+    )
+
+
+def test_main_openapi_read_only_write_only_empty_base(output_file: Path) -> None:
+    """Test readOnly/writeOnly with empty base class (no fields)."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "read_only_write_only_empty_base.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="read_only_write_only_empty_base.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--read-only-write-only-model-type",
+            "all",
+        ],
+    )
+
+
+def test_main_openapi_dot_notation_inheritance(output_dir: Path) -> None:
+    """Test dot notation in schema names with inheritance."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "dot_notation_inheritance.yaml",
+        output_path=output_dir,
+        expected_directory=EXPECTED_OPENAPI_PATH / "dot_notation_inheritance",
+        input_file_type="openapi",
+    )
+
+
+def test_main_openapi_dot_notation_deep_inheritance(output_dir: Path) -> None:
+    """Test dot notation with deep inheritance from ancestor packages (issue #2039)."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "dot_notation_deep_inheritance.yaml",
+        output_path=output_dir,
+        expected_directory=EXPECTED_OPENAPI_PATH / "dot_notation_deep_inheritance",
+        input_file_type="openapi",
+    )
+
+
+def test_main_openapi_strict_types_field_constraints_pydantic_v2(output_file: Path) -> None:
+    """Test strict types with field constraints for pydantic v2 (issue #1884)."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "strict_types_field_constraints.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="strict_types_field_constraints_pydantic_v2.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--field-constraints",
+            "--strict-types",
+            "int",
+            "float",
+            "str",
+        ],
+    )
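+
+# Hedged sketch of the combination under test (field name assumed, not the golden
+# file): with --strict-types plus --field-constraints, strict scalar types carry
+# their constraints via Field keywords rather than constrained-type factories:
+#
+#     from pydantic import BaseModel, Field, StrictInt
+#
+#     class Item(BaseModel):
+#         count: StrictInt = Field(..., ge=0)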
+
+
+def test_main_openapi_strict_types_field_constraints_msgspec(output_file: Path) -> None:
+    """Test strict types with field constraints for msgspec (issue #1884)."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "strict_types_field_constraints.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="strict_types_field_constraints_msgspec.py",
+        extra_args=[
+            "--output-model-type",
+            "msgspec.Struct",
+            "--field-constraints",
+            "--strict-types",
+            "int",
+            "float",
+            "str",
+        ],
+    )
+
+
+def test_main_openapi_circular_imports_stripe_like(output_dir: Path) -> None:
+    """Test that circular imports between root and submodules are resolved with _internal.py."""
+    with freeze_time(TIMESTAMP):
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "circular_imports_stripe_like.yaml",
+            output_path=output_dir,
+            expected_directory=EXPECTED_OPENAPI_PATH / "circular_imports_stripe_like",
+            input_file_type="openapi",
+        )
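+
+# Rough sketch of the resolution strategy asserted by this and the tests below:
+# models participating in an import cycle are hoisted into a shared _internal.py
+# and re-exported from their original modules. Module names here are assumptions:
+#
+#     model/
+#         _internal.py   # cycle members live here (name conflicts renamed, e.g. Tea_1)
+#         charge.py      # from ._internal import Charge
+#         customer.py    # from ._internal import Customer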
+
+
+def test_main_openapi_circular_imports_acyclic(output_dir: Path) -> None:
+    """Test that acyclic dependencies do not create _internal.py."""
+    with freeze_time(TIMESTAMP):
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "circular_imports_acyclic.yaml",
+            output_path=output_dir,
+            expected_directory=EXPECTED_OPENAPI_PATH / "circular_imports_acyclic",
+            input_file_type="openapi",
+        )
+
+
+def test_main_openapi_circular_imports_class_conflict(output_dir: Path) -> None:
+    """Test that class name conflicts in merged _internal.py are resolved with sequential renaming."""
+    with freeze_time(TIMESTAMP):
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "circular_imports_class_conflict.yaml",
+            output_path=output_dir,
+            expected_directory=EXPECTED_OPENAPI_PATH / "circular_imports_class_conflict",
+            input_file_type="openapi",
+        )
+
+
+def test_main_openapi_circular_imports_with_inheritance(output_dir: Path) -> None:
+    """Test that circular imports with base class inheritance are resolved."""
+    with freeze_time(TIMESTAMP):
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "circular_imports_with_inheritance.yaml",
+            output_path=output_dir,
+            expected_directory=EXPECTED_OPENAPI_PATH / "circular_imports_with_inheritance",
+            input_file_type="openapi",
+        )
+
+
+def test_main_openapi_circular_imports_small_cycle(output_dir: Path) -> None:
+    """Test that small 2-module cycles also create _internal.py."""
+    with freeze_time(TIMESTAMP):
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "circular_imports_small_cycle.yaml",
+            output_path=output_dir,
+            expected_directory=EXPECTED_OPENAPI_PATH / "circular_imports_small_cycle",
+            input_file_type="openapi",
+        )
+
+
+def test_main_openapi_circular_imports_different_prefixes(output_dir: Path) -> None:
+    """Test circular imports with different module prefixes (tests LCP computation)."""
+    with freeze_time(TIMESTAMP):
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "circular_imports_different_prefixes.yaml",
+            output_path=output_dir,
+            expected_directory=EXPECTED_OPENAPI_PATH / "circular_imports_different_prefixes",
+            input_file_type="openapi",
+        )
+
+
+def test_main_openapi_circular_imports_mixed_prefixes(output_dir: Path) -> None:
+    """Test circular imports with mixed common/different prefixes (tests LCP break branch)."""
+    with freeze_time(TIMESTAMP):
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "circular_imports_mixed_prefixes.yaml",
+            output_path=output_dir,
+            expected_directory=EXPECTED_OPENAPI_PATH / "circular_imports_mixed_prefixes",
+            input_file_type="openapi",
+        )
+
+
+def test_warning_empty_schemas_with_paths(tmp_path: Path) -> None:
+    """Test warning when components/schemas is empty but paths exist."""
+    openapi_file = tmp_path / "openapi.yaml"
+    openapi_file.write_text("""
+openapi: 3.1.0
+info:
+  title: Test
+  version: '1'
+paths:
+  /test:
+    get:
+      responses:
+        200:
+          description: OK
+""")
+
+    with pytest.warns(UserWarning, match=r"No schemas found.*--openapi-scopes paths"), contextlib.suppress(Exception):
+        generate(openapi_file)
+
+
+def test_main_allof_enum_ref(output_file: Path) -> None:
+    """Test OpenAPI generation with allOf referencing enum from another schema."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "allof_enum_ref.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+    )
+
+
+@pytest.mark.skipif(
+    version.parse(pydantic.VERSION) < version.parse("2.0.0"),
+    reason="Require Pydantic version 2.0.0 or later",
+)
+def test_main_openapi_module_class_name_collision_pydantic_v2(output_dir: Path) -> None:
+    """Test Issue #1994: module and class name collision (e.g., A.A schema)."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "module_class_name_collision" / "openapi.json",
+        output_path=output_dir,
+        expected_directory=EXPECTED_OPENAPI_PATH / "module_class_name_collision",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--openapi-scopes",
+            "schemas",
+            "--openapi-scopes",
+            "paths",
+        ],
+    )
+
+
+@pytest.mark.skipif(
+    version.parse(pydantic.VERSION) < version.parse("2.0.0"),
+    reason="Require Pydantic version 2.0.0 or later",
+)
+def test_main_openapi_module_class_name_collision_deep_pydantic_v2(output_dir: Path) -> None:
+    """Test Issue #1994: deep module collision (e.g., A.B.B schema)."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "module_class_name_collision_deep" / "openapi.json",
+        output_path=output_dir,
+        expected_directory=EXPECTED_OPENAPI_PATH / "module_class_name_collision_deep",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--openapi-scopes",
+            "schemas",
+            "--openapi-scopes",
+            "paths",
+        ],
+    )
+
+
+def test_main_nested_package_enum_default(output_dir: Path) -> None:
+    """Test enum default values use short names in same module with nested package paths."""
+    with freeze_time(TIMESTAMP):
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "nested_package_enum_default.json",
+            output_path=output_dir,
+            expected_directory=EXPECTED_OPENAPI_PATH / "nested_package_enum_default",
+            extra_args=[
+                "--output-model-type",
+                "dataclasses.dataclass",
+                "--set-default-enum-member",
+            ],
+        )
+
+
+def test_main_openapi_x_enum_names(output_file: Path) -> None:
+    """Test OpenAPI generation with x-enumNames extension (NSwag/NJsonSchema style)."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "x_enum_names.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="x_enum_names.py",
+    )
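+
+# Hedged sketch of the NSwag/NJsonSchema convention this test covers (schema
+# fragment and names are illustrative assumptions): a schema such as
+#
+#     enum: [0, 1]
+#     x-enumNames: [Active, Inactive]
+#
+# is expected to yield named enum members instead of auto-generated ones:
+#
+#     class Status(Enum):
+#         Active = 0
+#         Inactive = 1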
+
+
+def test_main_enum_builtin_conflict(output_file: Path) -> None:
+    """Test enum member names that conflict with str methods get underscore suffix."""
+    with freeze_time(TIMESTAMP):
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "enum_builtin_conflict.yaml",
+            output_path=output_file,
+            input_file_type="openapi",
+            assert_func=assert_file_content,
+            expected_file="enum_builtin_conflict.py",
+            extra_args=["--use-subclass-enum"],
+        )
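+
+# Minimal sketch of the rename rule under test (member names assumed): with
+# --use-subclass-enum the enum subclasses str, so a member whose name collides
+# with a str method gets a trailing underscore:
+#
+#     class Kind(str, Enum):
+#         title_ = "title"
+#         upper_ = "upper"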
+
+
+@pytest.mark.parametrize(
+    ("output_model", "expected_output"),
+    [
+        ("pydantic.BaseModel", "unique_items_default_set_pydantic.py"),
+        ("pydantic_v2.BaseModel", "unique_items_default_set_pydantic_v2.py"),
+        ("dataclasses.dataclass", "unique_items_default_set_dataclass.py"),
+        ("msgspec.Struct", "unique_items_default_set_msgspec.py"),
+    ],
+)
+def test_main_unique_items_default_set(output_model: str, expected_output: str, output_file: Path) -> None:
+    """Test --use-unique-items-as-set converts list defaults to set literals."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "unique_items_default_set.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file=expected_output,
+        extra_args=["--output-model-type", output_model, "--use-unique-items-as-set"],
+    )
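+
+# Illustrative sketch (field name assumed) of what --use-unique-items-as-set does
+# to a uniqueItems array with a default: both the annotation and the default
+# literal become sets instead of lists, e.g. for the pydantic v2 variant:
+#
+#     class Config(BaseModel):
+#         tags: set[str] = {"a", "b"}   # instead of: list[str] = ["a", "b"]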
+
+
+def test_main_openapi_null_only_enum(output_file: Path) -> None:
+    """Test OpenAPI generation with enum containing only null value."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "null_only_enum.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file="null_only_enum.py",
+    )
diff -pruN 0.26.4-3/tests/main/test_main_csv.py 0.45.0-1/tests/main/test_main_csv.py
--- 0.26.4-3/tests/main/test_main_csv.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/main/test_main_csv.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,43 @@
+"""Tests for CSV input file code generation."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from tests.conftest import create_assert_file_content
+from tests.main.conftest import (
+    CSV_DATA_PATH,
+    EXPECTED_CSV_PATH,
+    run_main_and_assert,
+)
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+    import pytest
+
+
+assert_file_content = create_assert_file_content(EXPECTED_CSV_PATH)
+
+
+def test_csv_file(output_file: Path) -> None:
+    """Test CSV file input code generation."""
+    run_main_and_assert(
+        input_path=CSV_DATA_PATH / "simple.csv",
+        output_path=output_file,
+        input_file_type="csv",
+        assert_func=assert_file_content,
+        expected_file="csv_file_simple.py",
+    )
+
+
+def test_csv_stdin(monkeypatch: pytest.MonkeyPatch, output_file: Path) -> None:
+    """Test CSV stdin input code generation."""
+    run_main_and_assert(
+        stdin_path=CSV_DATA_PATH / "simple.csv",
+        output_path=output_file,
+        monkeypatch=monkeypatch,
+        input_file_type="csv",
+        assert_func=assert_file_content,
+        expected_file="csv_stdin_simple.py",
+    )
diff -pruN 0.26.4-3/tests/main/test_main_general.py 0.45.0-1/tests/main/test_main_general.py
--- 0.26.4-3/tests/main/test_main_general.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/main/test_main_general.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,1059 @@
+"""General integration tests for main code generation functionality."""
+
+from __future__ import annotations
+
+from argparse import ArgumentTypeError, Namespace
+from typing import TYPE_CHECKING
+
+import black
+import pytest
+
+from datamodel_code_generator import (
+    DataModelType,
+    Error,
+    InputFileType,
+    chdir,
+    generate,
+    snooper_to_methods,
+)
+from datamodel_code_generator.__main__ import Config, Exit
+from datamodel_code_generator.arguments import _dataclass_arguments
+from datamodel_code_generator.format import PythonVersion
+from tests.conftest import create_assert_file_content, freeze_time
+from tests.main.conftest import (
+    DATA_PATH,
+    EXPECTED_MAIN_PATH,
+    JSON_SCHEMA_DATA_PATH,
+    OPEN_API_DATA_PATH,
+    PYTHON_DATA_PATH,
+    TIMESTAMP,
+    run_main_and_assert,
+    run_main_with_args,
+)
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+    from pytest_mock import MockerFixture
+
+assert_file_content = create_assert_file_content(EXPECTED_MAIN_PATH)
+
+
+def test_debug(mocker: MockerFixture) -> None:
+    """Test debug flag functionality."""
+    with pytest.raises(expected_exception=SystemExit):
+        run_main_with_args(["--debug", "--help"])
+
+    mocker.patch("datamodel_code_generator.pysnooper", None)
+    with pytest.raises(expected_exception=SystemExit):
+        run_main_with_args(["--debug", "--help"])
+
+
+def test_snooper_to_methods_without_pysnooper(mocker: MockerFixture) -> None:
+    """Test snooper_to_methods function without pysnooper installed."""
+    mocker.patch("datamodel_code_generator.pysnooper", None)
+    mock = mocker.Mock()
+    assert snooper_to_methods()(mock) == mock
+
+
+@pytest.mark.parametrize(argnames="no_color", argvalues=[False, True])
+def test_show_help(no_color: bool, capsys: pytest.CaptureFixture[str]) -> None:
+    """Test help output with and without color."""
+    args = ["--no-color"] if no_color else []
+    args += ["--help"]
+
+    with pytest.raises(expected_exception=SystemExit) as context:
+        run_main_with_args(args)
+    assert context.value.code == Exit.OK
+
+    output = capsys.readouterr().out
+    assert ("\x1b" not in output) == no_color
+
+
+def test_show_help_when_no_input(mocker: MockerFixture) -> None:
+    """Test help display when no input is provided."""
+    print_help_mock = mocker.patch("datamodel_code_generator.__main__.arg_parser.print_help")
+    isatty_mock = mocker.patch("sys.stdin.isatty", return_value=True)
+    return_code: Exit = run_main_with_args([], expected_exit=Exit.ERROR)
+    assert return_code == Exit.ERROR
+    assert isatty_mock.called
+    assert print_help_mock.called
+
+
+def test_no_args_has_default(monkeypatch: pytest.MonkeyPatch) -> None:
+    """No argument should have a default value set because it would override pyproject.toml values.
+
+    Default values are set in __main__.Config class.
+    """
+    namespace = Namespace()
+    monkeypatch.setattr("datamodel_code_generator.__main__.namespace", namespace)
+    run_main_with_args([], expected_exit=Exit.ERROR)
+    for field in Config.get_fields():
+        assert getattr(namespace, field, None) is None
+
+
+def test_space_and_special_characters_dict(output_file: Path) -> None:
+    """Test dict input with space and special characters."""
+    run_main_and_assert(
+        input_path=PYTHON_DATA_PATH / "space_and_special_characters_dict.py",
+        output_path=output_file,
+        input_file_type="dict",
+        assert_func=assert_file_content,
+    )
+
+
+@freeze_time("2024-12-14")
+def test_direct_input_dict(tmp_path: Path) -> None:
+    """Test direct dict input code generation."""
+    output_file = tmp_path / "output.py"
+    generate(
+        {"foo": 1, "bar": {"baz": 2}},
+        input_file_type=InputFileType.Dict,
+        output=output_file,
+        output_model_type=DataModelType.PydanticV2BaseModel,
+        snake_case_field=True,
+    )
+    assert_file_content(output_file)
+
+
+@freeze_time(TIMESTAMP)
+@pytest.mark.parametrize(
+    ("keyword_only", "target_python_version", "expected_file"),
+    [
+        (False, PythonVersion.PY_39, "frozen_dataclasses.py"),
+        (True, PythonVersion.PY_310, "frozen_dataclasses_keyword_only.py"),
+    ],
+)
+def test_frozen_dataclasses(
+    tmp_path: Path,
+    keyword_only: bool,
+    target_python_version: PythonVersion,
+    expected_file: str,
+) -> None:
+    """Test --frozen-dataclasses flag functionality."""
+    output_file = tmp_path / "output.py"
+    generate(
+        DATA_PATH / "jsonschema" / "simple_frozen_test.json",
+        input_file_type=InputFileType.JsonSchema,
+        output=output_file,
+        output_model_type=DataModelType.DataclassesDataclass,
+        frozen_dataclasses=True,
+        keyword_only=keyword_only,
+        target_python_version=target_python_version,
+    )
+    assert_file_content(output_file, expected_file)
+
+
+@pytest.mark.cli_doc(
+    options=["--frozen-dataclasses"],
+    input_schema="jsonschema/simple_frozen_test.json",
+    cli_args=["--output-model-type", "dataclasses.dataclass", "--frozen-dataclasses"],
+    golden_output="frozen_dataclasses.py",
+    related_options=["--keyword-only", "--output-model-type"],
+)
+@freeze_time(TIMESTAMP)
+@pytest.mark.parametrize(
+    ("extra_args", "expected_file"),
+    [
+        (["--output-model-type", "dataclasses.dataclass", "--frozen-dataclasses"], "frozen_dataclasses.py"),
+        (
+            [
+                "--output-model-type",
+                "dataclasses.dataclass",
+                "--frozen-dataclasses",
+                "--keyword-only",
+                "--target-python-version",
+                "3.10",
+            ],
+            "frozen_dataclasses_keyword_only.py",
+        ),
+    ],
+)
+def test_frozen_dataclasses_command_line(output_file: Path, extra_args: list[str], expected_file: str) -> None:
+    """Generate frozen dataclasses with optional keyword-only fields.
+
+    The `--frozen-dataclasses` flag generates dataclasses declared with frozen=True,
+    so generated instances are immutable. Combined with `--keyword-only` (Python
+    3.10+), all fields become keyword-only arguments.
+    """
+    run_main_and_assert(
+        input_path=DATA_PATH / "jsonschema" / "simple_frozen_test.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file=expected_file,
+        extra_args=extra_args,
+    )
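+
+# Minimal sketch of the expected shape (class and field names assumed, not the
+# golden file):
+#
+#     @dataclass(frozen=True)
+#     class Point:
+#         x: int
+#         y: int
+#
+# With --keyword-only on Python 3.10+ the decorator becomes
+# @dataclass(frozen=True, kw_only=True).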
+
+
+@freeze_time(TIMESTAMP)
+def test_use_attribute_docstrings(tmp_path: Path) -> None:
+    """Test --use-attribute-docstrings flag functionality."""
+    output_file = tmp_path / "output.py"
+    generate(
+        DATA_PATH / "jsonschema" / "use_attribute_docstrings_test.json",
+        input_file_type=InputFileType.JsonSchema,
+        output=output_file,
+        output_model_type=DataModelType.PydanticV2BaseModel,
+        use_field_description=True,
+        use_attribute_docstrings=True,
+    )
+    assert_file_content(output_file)
+
+
+@freeze_time(TIMESTAMP)
+@pytest.mark.cli_doc(
+    options=["--use-attribute-docstrings"],
+    input_schema="jsonschema/use_attribute_docstrings_test.json",
+    cli_args=[
+        "--output-model-type",
+        "pydantic_v2.BaseModel",
+        "--use-field-description",
+        "--use-attribute-docstrings",
+    ],
+    golden_output="use_attribute_docstrings.py",
+    related_options=["--use-field-description"],
+)
+def test_use_attribute_docstrings_command_line(output_file: Path) -> None:
+    """Generate field descriptions as attribute docstrings instead of Field descriptions.
+
+    The `--use-attribute-docstrings` flag places field descriptions in Python docstring
+    format (PEP 224 attribute docstrings) rather than in Field(..., description=...).
+    This provides better IDE support for hovering over attributes. Requires
+    `--use-field-description` to be enabled.
+    """
+    run_main_and_assert(
+        input_path=DATA_PATH / "jsonschema" / "use_attribute_docstrings_test.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="use_attribute_docstrings.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--use-field-description",
+            "--use-attribute-docstrings",
+        ],
+    )
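+
+# Sketch of the two styles this flag switches between (model and field assumed):
+#
+#     # default: description carried by Field
+#     class User(BaseModel):
+#         name: str = Field(..., description="The user's name")
+#
+#     # with --use-attribute-docstrings (PEP 224 style)
+#     class User(BaseModel):
+#         name: str
+#         """The user's name"""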
+
+
+def test_filename_with_newline_injection(tmp_path: Path) -> None:
+    """Test that filenames with newlines cannot inject code into generated files."""
+    schema_content = """{"type": "object", "properties": {"name": {"type": "string"}}}"""
+
+    malicious_filename = """schema.json
+# INJECTED CODE:
+import os
+os.system('echo INJECTED')
+# END INJECTION"""
+
+    output_path = tmp_path / "output.py"
+
+    generate(
+        input_=schema_content,
+        input_filename=malicious_filename,
+        input_file_type=InputFileType.JsonSchema,
+        output=output_path,
+    )
+
+    generated_content = output_path.read_text()
+
+    assert "#   filename:  schema.json # INJECTED CODE: import os" in generated_content, (
+        "Filename not properly sanitized"
+    )
+
+    assert not any(
+        line.strip().startswith("import os") and not line.strip().startswith("#")
+        for line in generated_content.split("\n")
+    )
+    assert not any("os.system" in line and not line.strip().startswith("#") for line in generated_content.split("\n"))
+
+    compile(generated_content, str(output_path), "exec")
+
+
+def test_filename_with_various_control_characters(tmp_path: Path) -> None:
+    """Test that various control characters in filenames are properly sanitized."""
+    schema_content = """{"type": "object", "properties": {"test": {"type": "string"}}}"""
+
+    test_cases = [
+        ("newline", "schema.json\nimport os; os.system('echo INJECTED')"),
+        ("carriage_return", "schema.json\rimport os; os.system('echo INJECTED')"),
+        ("crlf", "schema.json\r\nimport os; os.system('echo INJECTED')"),
+        ("tab_newline", "schema.json\t\nimport os; os.system('echo TAB')"),
+        ("form_feed", "schema.json\f\nimport os; os.system('echo FF')"),
+        ("vertical_tab", "schema.json\v\nimport os; os.system('echo VT')"),
+        ("unicode_line_separator", "schema.json\u2028import os; os.system('echo U2028')"),
+        ("unicode_paragraph_separator", "schema.json\u2029import os; os.system('echo U2029')"),
+        ("multiple_newlines", "schema.json\n\n\nimport os; os.system('echo MULTI')"),
+        ("mixed_characters", "schema.json\n\r\t\nimport os; os.system('echo MIXED')"),
+    ]
+
+    for test_name, malicious_filename in test_cases:
+        output_path = tmp_path / "output.py"
+
+        generate(
+            input_=schema_content,
+            input_filename=malicious_filename,
+            input_file_type=InputFileType.JsonSchema,
+            output=output_path,
+        )
+
+        generated_content = output_path.read_text()
+
+        assert not any(
+            line.strip().startswith("import ") and not line.strip().startswith("#")
+            for line in generated_content.split("\n")
+        ), f"Injection found for {test_name}"
+
+        assert not any(
+            "os.system" in line and not line.strip().startswith("#") for line in generated_content.split("\n")
+        ), f"System call found for {test_name}"
+
+        compile(generated_content, str(output_path), "exec")
+
+
+def test_generate_with_nonexistent_file(tmp_path: Path) -> None:
+    """Test that generating from a nonexistent file raises an error."""
+    nonexistent_file = tmp_path / "nonexistent.json"
+    output_file = tmp_path / "output.py"
+
+    with pytest.raises(Error, match="File not found"):
+        generate(
+            input_=nonexistent_file,
+            output=output_file,
+        )
+
+
+def test_generate_with_invalid_file_format(tmp_path: Path) -> None:
+    """Test that generating from an invalid file format raises an error."""
+    invalid_file = tmp_path / "invalid.txt"
+    invalid_file.write_text("this is not valid json or yaml or anything")
+    output_file = tmp_path / "output.py"
+
+    with pytest.raises(Error, match="Invalid file format"):
+        generate(
+            input_=invalid_file,
+            output=output_file,
+        )
+
+
+def test_generate_cli_command_with_no_use_specialized_enum(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> None:
+    """Test --generate-cli-command with use-specialized-enum = false."""
+    pyproject_toml = """
+[tool.datamodel-codegen]
+input = "schema.yaml"
+use-specialized-enum = false
+"""
+    (tmp_path / "pyproject.toml").write_text(pyproject_toml)
+
+    with chdir(tmp_path):
+        run_main_with_args(
+            ["--generate-cli-command"],
+            capsys=capsys,
+            expected_stdout_path=EXPECTED_MAIN_PATH / "generate_cli_command" / "no_use_specialized_enum.txt",
+        )
+
+
+def test_generate_cli_command_with_false_boolean(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> None:
+    """Test --generate-cli-command with regular boolean set to false (should be skipped)."""
+    pyproject_toml = """
+[tool.datamodel-codegen]
+input = "schema.yaml"
+snake-case-field = false
+"""
+    (tmp_path / "pyproject.toml").write_text(pyproject_toml)
+
+    with chdir(tmp_path):
+        run_main_with_args(
+            ["--generate-cli-command"],
+            capsys=capsys,
+            expected_stdout_path=EXPECTED_MAIN_PATH / "generate_cli_command" / "false_boolean.txt",
+        )
+
+
+def test_generate_cli_command_with_true_boolean(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> None:
+    """Test --generate-cli-command with boolean set to true."""
+    pyproject_toml = """
+[tool.datamodel-codegen]
+input = "schema.yaml"
+snake-case-field = true
+"""
+    (tmp_path / "pyproject.toml").write_text(pyproject_toml)
+
+    with chdir(tmp_path):
+        run_main_with_args(
+            ["--generate-cli-command"],
+            capsys=capsys,
+            expected_stdout_path=EXPECTED_MAIN_PATH / "generate_cli_command" / "true_boolean.txt",
+        )
+
+
+def test_generate_cli_command_with_list_option(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> None:
+    """Test --generate-cli-command with list option."""
+    pyproject_toml = """
+[tool.datamodel-codegen]
+input = "schema.yaml"
+strict-types = ["str", "int"]
+"""
+    (tmp_path / "pyproject.toml").write_text(pyproject_toml)
+
+    with chdir(tmp_path):
+        run_main_with_args(
+            ["--generate-cli-command"],
+            capsys=capsys,
+            expected_stdout_path=EXPECTED_MAIN_PATH / "generate_cli_command" / "list_option.txt",
+        )
+
+
+@pytest.mark.parametrize(
+    ("json_str", "expected"),
+    [
+        ('{"frozen": true, "slots": true}', {"frozen": True, "slots": True}),
+        ("{}", {}),
+    ],
+)
+def test_dataclass_arguments_valid(json_str: str, expected: dict) -> None:
+    """Test that valid JSON is parsed correctly."""
+    assert _dataclass_arguments(json_str) == expected
+
+
+@pytest.mark.parametrize(
+    ("json_str", "match"),
+    [
+        ("not-valid-json", "Invalid JSON:"),
+        ("[1, 2, 3]", "Expected a JSON dictionary, got list"),
+        ('"just a string"', "Expected a JSON dictionary, got str"),
+        ("42", "Expected a JSON dictionary, got int"),
+        ('{"invalid_key": true}', "Invalid keys:"),
+        ('{"frozen": "not_bool"}', "Expected bool for 'frozen', got str"),
+    ],
+)
+def test_dataclass_arguments_invalid(json_str: str, match: str) -> None:
+    """Test that invalid input raises ArgumentTypeError."""
+    with pytest.raises(ArgumentTypeError, match=match):
+        _dataclass_arguments(json_str)
+
+
+def test_skip_root_model(tmp_path: Path) -> None:
+    """Test --skip-root-model flag functionality using generate()."""
+    output_file = tmp_path / "output.py"
+    generate(
+        DATA_PATH / "jsonschema" / "skip_root_model_test.json",
+        input_file_type=InputFileType.JsonSchema,
+        output=output_file,
+        output_model_type=DataModelType.PydanticV2BaseModel,
+        skip_root_model=True,
+    )
+    assert_file_content(output_file, "skip_root_model.py")
+
+
+@pytest.mark.cli_doc(
+    options=["--skip-root-model"],
+    input_schema="jsonschema/skip_root_model_test.json",
+    cli_args=["--output-model-type", "pydantic_v2.BaseModel", "--skip-root-model"],
+    golden_output="skip_root_model.py",
+)
+def test_skip_root_model_command_line(output_file: Path) -> None:
+    """Skip generation of root model when schema contains nested definitions.
+
+    The `--skip-root-model` flag prevents generating a model for the root schema object
+    when the schema primarily contains reusable definitions. This is useful when the root
+    object is just a container for $defs and not a meaningful model itself.
+    """
+    run_main_and_assert(
+        input_path=DATA_PATH / "jsonschema" / "skip_root_model_test.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file="skip_root_model.py",
+        extra_args=["--output-model-type", "pydantic_v2.BaseModel", "--skip-root-model"],
+    )
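+
+# Hedged sketch of the case --skip-root-model targets (schema assumed): a root
+# object that is only a container for reusable definitions, e.g.
+#
+#     {"$defs": {"Person": {...}, "Address": {...}}}
+#
+# generates models for Person and Address but none for the root object itself.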
+
+
+@pytest.mark.cli_doc(
+    options=["--check"],
+    input_schema="jsonschema/person.json",
+    cli_args=["--disable-timestamp", "--check"],
+    golden_output="person.py",
+)
+def test_check_file_matches(output_file: Path) -> None:
+    """Verify generated code matches existing output without modifying files.
+
+    The `--check` flag compares the generated output with existing files and exits with
+    a non-zero status if they differ. Useful for CI/CD validation to ensure schemas
+    and generated code stay in sync. Works with both single files and directory outputs.
+    """
+    input_path = DATA_PATH / "jsonschema" / "person.json"
+    run_main_and_assert(
+        input_path=input_path,
+        output_path=output_file,
+        input_file_type="jsonschema",
+        extra_args=["--disable-timestamp"],
+    )
+    run_main_and_assert(
+        input_path=input_path,
+        output_path=output_file,
+        input_file_type="jsonschema",
+        extra_args=["--disable-timestamp", "--check"],
+        expected_exit=Exit.OK,
+    )
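+
+# The tests below pin down the --check exit-code contract exercised here:
+# Exit.OK when the output matches, Exit.DIFF when a file differs, is missing, or
+# is extra, and Exit.ERROR for misuse (stdout output, unreadable or invalid input).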
+
+
+def test_check_file_does_not_exist(tmp_path: Path) -> None:
+    """Test --check returns DIFF when file does not exist."""
+    run_main_and_assert(
+        input_path=DATA_PATH / "jsonschema" / "person.json",
+        output_path=tmp_path / "nonexistent.py",
+        input_file_type="jsonschema",
+        extra_args=["--disable-timestamp", "--check"],
+        expected_exit=Exit.DIFF,
+    )
+
+
+def test_check_file_differs(output_file: Path) -> None:
+    """Test --check returns DIFF when file content differs."""
+    output_file.write_text("# Different content\n", encoding="utf-8")
+    run_main_and_assert(
+        input_path=DATA_PATH / "jsonschema" / "person.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        extra_args=["--disable-timestamp", "--check"],
+        expected_exit=Exit.DIFF,
+    )
+
+
+def test_check_with_stdout_output(capsys: pytest.CaptureFixture[str]) -> None:
+    """Test --check with stdout output returns error."""
+    run_main_and_assert(
+        input_path=DATA_PATH / "jsonschema" / "person.json",
+        output_path=None,
+        input_file_type="jsonschema",
+        extra_args=["--check"],
+        expected_exit=Exit.ERROR,
+        capsys=capsys,
+        expected_stderr_contains="--check cannot be used with stdout",
+    )
+
+
+def test_check_with_nonexistent_input(tmp_path: Path) -> None:
+    """Test --check with nonexistent input file returns error."""
+    run_main_and_assert(
+        input_path=tmp_path / "nonexistent.json",
+        output_path=tmp_path / "output.py",
+        input_file_type="jsonschema",
+        extra_args=["--check"],
+        expected_exit=Exit.ERROR,
+    )
+
+
+def test_check_normalizes_line_endings(output_file: Path) -> None:
+    """Test --check normalizes line endings (CRLF vs LF)."""
+    input_path = DATA_PATH / "jsonschema" / "person.json"
+    run_main_and_assert(
+        input_path=input_path,
+        output_path=output_file,
+        input_file_type="jsonschema",
+        extra_args=["--disable-timestamp"],
+    )
+    content = output_file.read_text(encoding="utf-8")
+    output_file.write_bytes(content.replace("\n", "\r\n").encode("utf-8"))
+    run_main_and_assert(
+        input_path=input_path,
+        output_path=output_file,
+        input_file_type="jsonschema",
+        extra_args=["--disable-timestamp", "--check"],
+        expected_exit=Exit.OK,
+    )
+
+
+def test_check_directory_matches(output_dir: Path) -> None:
+    """Test --check returns OK when directory matches."""
+    input_path = OPEN_API_DATA_PATH / "modular.yaml"
+    run_main_and_assert(
+        input_path=input_path,
+        output_path=output_dir,
+        input_file_type="openapi",
+        extra_args=["--disable-timestamp"],
+    )
+    run_main_and_assert(
+        input_path=input_path,
+        output_path=output_dir,
+        input_file_type="openapi",
+        extra_args=["--disable-timestamp", "--check"],
+        expected_exit=Exit.OK,
+    )
+
+
+def test_check_directory_file_differs(output_dir: Path) -> None:
+    """Test --check returns DIFF when a file in directory differs."""
+    input_path = OPEN_API_DATA_PATH / "modular.yaml"
+    run_main_and_assert(
+        input_path=input_path,
+        output_path=output_dir,
+        input_file_type="openapi",
+        extra_args=["--disable-timestamp"],
+    )
+    py_files = list(output_dir.rglob("*.py"))
+    py_files[0].write_text("# Modified content\n", encoding="utf-8")
+    run_main_and_assert(
+        input_path=input_path,
+        output_path=output_dir,
+        input_file_type="openapi",
+        extra_args=["--disable-timestamp", "--check"],
+        expected_exit=Exit.DIFF,
+    )
+
+
+def test_check_directory_missing_file(output_dir: Path) -> None:
+    """Test --check returns DIFF when a generated file is missing."""
+    input_path = OPEN_API_DATA_PATH / "modular.yaml"
+    run_main_and_assert(
+        input_path=input_path,
+        output_path=output_dir,
+        input_file_type="openapi",
+        extra_args=["--disable-timestamp"],
+    )
+    py_files = list(output_dir.rglob("*.py"))
+    py_files[0].unlink()
+    run_main_and_assert(
+        input_path=input_path,
+        output_path=output_dir,
+        input_file_type="openapi",
+        extra_args=["--disable-timestamp", "--check"],
+        expected_exit=Exit.DIFF,
+    )
+
+
+def test_check_directory_extra_file(output_dir: Path) -> None:
+    """Test --check returns DIFF when an extra file exists."""
+    input_path = OPEN_API_DATA_PATH / "modular.yaml"
+    run_main_and_assert(
+        input_path=input_path,
+        output_path=output_dir,
+        input_file_type="openapi",
+        extra_args=["--disable-timestamp"],
+    )
+    (output_dir / "extra_model.py").write_text("# Extra file\n", encoding="utf-8")
+    run_main_and_assert(
+        input_path=input_path,
+        output_path=output_dir,
+        input_file_type="openapi",
+        extra_args=["--disable-timestamp", "--check"],
+        expected_exit=Exit.DIFF,
+    )
+
+
+def test_check_directory_does_not_exist(tmp_path: Path) -> None:
+    """Test --check returns DIFF when output directory does not exist."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "modular.yaml",
+        output_path=tmp_path / "nonexistent_model",
+        input_file_type="openapi",
+        extra_args=["--disable-timestamp", "--check"],
+        expected_exit=Exit.DIFF,
+    )
+
+
+def test_check_directory_ignores_pycache(output_dir: Path) -> None:
+    """Test --check ignores __pycache__ directories in actual output."""
+    input_path = OPEN_API_DATA_PATH / "modular.yaml"
+    run_main_and_assert(
+        input_path=input_path,
+        output_path=output_dir,
+        input_file_type="openapi",
+        extra_args=["--disable-timestamp"],
+    )
+    pycache_dir = output_dir / "__pycache__"
+    pycache_dir.mkdir()
+    (pycache_dir / "module.cpython-313.pyc").write_bytes(b"fake bytecode")
+    (pycache_dir / "extra.py").write_text("# should be ignored\n", encoding="utf-8")
+    run_main_and_assert(
+        input_path=input_path,
+        output_path=output_dir,
+        input_file_type="openapi",
+        extra_args=["--disable-timestamp", "--check"],
+        expected_exit=Exit.OK,
+    )
+
+
+def test_check_with_invalid_class_name(tmp_path: Path) -> None:
+    """Test --check cleans up temp directory when InvalidClassNameError occurs."""
+    invalid_schema = tmp_path / "invalid.json"
+    invalid_schema.write_text('{"title": "123InvalidName", "type": "object"}', encoding="utf-8")
+    output_path = tmp_path / "output.py"
+    run_main_and_assert(
+        input_path=invalid_schema,
+        output_path=output_path,
+        input_file_type="jsonschema",
+        extra_args=["--check"],
+        expected_exit=Exit.ERROR,
+        expected_stderr_contains="You have to set `--class-name` option",
+    )
+
+
+def test_check_with_invalid_file_format(tmp_path: Path) -> None:
+    """Test --check cleans up temp directory when Error occurs (invalid file format)."""
+    invalid_file = tmp_path / "invalid.txt"
+    invalid_file.write_text("This is not a valid schema format!!!", encoding="utf-8")
+    output_path = tmp_path / "output.py"
+    run_main_and_assert(
+        input_path=invalid_file,
+        output_path=output_path,
+        extra_args=["--check"],
+        expected_exit=Exit.ERROR,
+        expected_stderr_contains="Invalid file format",
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--all-exports-scope"],
+    input_schema="openapi/modular.yaml",
+    cli_args=["--all-exports-scope", "children"],
+    golden_output="openapi/modular_all_exports_children",
+    related_options=["--all-exports-collision-strategy"],
+)
+def test_all_exports_scope_children(output_dir: Path) -> None:
+    """Generate __all__ exports for child modules in __init__.py files.
+
+    The `--all-exports-scope=children` flag adds an __all__ list to each __init__.py,
+    re-exporting names from its direct child modules. This improves IDE autocomplete
+    and makes the package's exports explicit.
+    Use 'recursive' to include all descendant exports with collision handling.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "modular.yaml",
+        output_path=output_dir,
+        input_file_type="openapi",
+        extra_args=["--disable-timestamp", "--all-exports-scope", "children"],
+        expected_directory=EXPECTED_MAIN_PATH / "openapi" / "modular_all_exports_children",
+    )
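+
+# Minimal sketch (module and class names assumed, not the golden tree) of the
+# children scope: each __init__.py gains an explicit export list built from its
+# direct child modules:
+#
+#     # models/__init__.py
+#     from .foo import Tea
+#
+#     __all__ = ["Tea"]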
+
+
+@pytest.mark.cli_doc(
+    options=["--all-exports-collision-strategy"],
+    input_schema="openapi/modular.yaml",
+    cli_args=["--all-exports-scope", "recursive", "--all-exports-collision-strategy", "minimal-prefix"],
+    golden_output="openapi/modular_all_exports_recursive",
+    related_options=["--all-exports-scope"],
+)
+def test_all_exports_scope_recursive_with_collision(output_dir: Path) -> None:
+    """Handle name collisions when exporting recursive module hierarchies.
+
+    The `--all-exports-collision-strategy` flag determines how to resolve naming conflicts
+    when using `--all-exports-scope=recursive`. The 'minimal-prefix' strategy adds the
+    minimum module path prefix needed to disambiguate colliding names, while 'full-prefix'
+    uses the complete module path. Requires `--all-exports-scope=recursive`.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "modular.yaml",
+        output_path=output_dir,
+        input_file_type="openapi",
+        extra_args=[
+            "--disable-timestamp",
+            "--all-exports-scope",
+            "recursive",
+            "--all-exports-collision-strategy",
+            "minimal-prefix",
+        ],
+        expected_directory=EXPECTED_MAIN_PATH / "openapi" / "modular_all_exports_recursive",
+    )
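+
+# Hedged note on the two strategies (renamed identifiers are assumptions, not the
+# golden output): when two modules both export a class named Tea, minimal-prefix
+# renames only enough of the module path to disambiguate the exported names, while
+# full-prefix always embeds the complete module path in each exported name.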
+
+
+def test_all_exports_scope_children_with_docstring_header(output_dir: Path) -> None:
+    """Test --all-exports-scope=children with --custom-file-header containing docstring."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "modular.yaml",
+        output_path=output_dir,
+        input_file_type="openapi",
+        extra_args=[
+            "--all-exports-scope",
+            "children",
+            "--custom-file-header-path",
+            str(DATA_PATH / "custom_file_header_docstring.txt"),
+        ],
+        expected_directory=EXPECTED_MAIN_PATH / "openapi" / "modular_all_exports_children_docstring",
+    )
+
+
+def test_all_exports_scope_recursive_collision_avoided_by_renaming(output_dir: Path) -> None:
+    """Test --all-exports-scope=recursive avoids collision through automatic class renaming.
+
+    With circular import resolution, conflicting class names (e.g., foo.Tea and nested.foo.Tea)
+    are automatically renamed (e.g., Tea and Tea_1) in _internal.py, so no collision error occurs.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "modular.yaml",
+        output_path=output_dir,
+        input_file_type="openapi",
+        extra_args=["--disable-timestamp", "--all-exports-scope", "recursive"],
+    )
+
+    # Verify both Tea and Tea_1 exist in _internal.py (collision avoided through renaming)
+    internal_content = (output_dir / "_internal.py").read_text()
+    assert "class Tea(BaseModel):" in internal_content, "Tea class should exist in _internal.py"
+    assert "class Tea_1(BaseModel):" in internal_content, "Tea_1 class should exist in _internal.py"
+
+
+def test_all_exports_collision_strategy_requires_recursive(output_dir: Path) -> None:
+    """Test --all-exports-collision-strategy requires --all-exports-scope=recursive."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "modular.yaml",
+        output_path=output_dir,
+        input_file_type="openapi",
+        extra_args=[
+            "--all-exports-scope",
+            "children",
+            "--all-exports-collision-strategy",
+            "minimal-prefix",
+        ],
+        expected_exit=Exit.ERROR,
+        expected_stderr_contains="--all-exports-collision-strategy",
+    )
+
+
+def test_all_exports_scope_recursive_with_full_prefix(output_dir: Path) -> None:
+    """Test --all-exports-scope=recursive with --all-exports-collision-strategy=full-prefix."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "modular.yaml",
+        output_path=output_dir,
+        input_file_type="openapi",
+        extra_args=[
+            "--disable-timestamp",
+            "--all-exports-scope",
+            "recursive",
+            "--all-exports-collision-strategy",
+            "full-prefix",
+        ],
+        expected_directory=EXPECTED_MAIN_PATH / "openapi" / "modular_all_exports_recursive_full_prefix",
+    )
+
+
+@pytest.mark.parametrize(
+    "strategy",
+    ["minimal-prefix", "full-prefix"],
+    ids=["minimal_prefix", "full_prefix"],
+)
+def test_all_exports_recursive_prefix_collision_with_local_model(output_dir: Path, strategy: str) -> None:
+    """Test that prefix resolution raises error when renamed export collides with local model."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "all_exports_prefix_collision.yaml",
+        output_path=output_dir,
+        input_file_type="openapi",
+        extra_args=[
+            "--all-exports-scope",
+            "recursive",
+            "--all-exports-collision-strategy",
+            strategy,
+        ],
+        expected_exit=Exit.ERROR,
+        expected_stderr_contains="InputMessage",
+    )
+
+
+def test_all_exports_scope_recursive_jsonschema_multi_file(output_dir: Path) -> None:
+    """Test --all-exports-scope=recursive with JSONSchema multi-file input."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "all_exports_multi_file",
+        output_path=output_dir,
+        input_file_type="jsonschema",
+        extra_args=[
+            "--disable-timestamp",
+            "--all-exports-scope",
+            "recursive",
+        ],
+        expected_directory=EXPECTED_MAIN_PATH / "jsonschema" / "all_exports_multi_file",
+    )
+
+
+def test_all_exports_recursive_local_model_collision_error(output_dir: Path) -> None:
+    """Test --all-exports-scope=recursive raises error when child export collides with local model."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "all_exports_local_collision.yaml",
+        output_path=output_dir,
+        input_file_type="openapi",
+        extra_args=[
+            "--all-exports-scope",
+            "recursive",
+        ],
+        expected_exit=Exit.ERROR,
+        expected_stderr_contains="conflicts with a model in __init__.py",
+    )
+
+
+def test_all_exports_scope_children_no_child_exports(output_dir: Path) -> None:
+    """Test --all-exports-scope=children when __init__.py has models but no direct child exports."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "all_exports_no_child.yaml",
+        output_path=output_dir,
+        input_file_type="openapi",
+        extra_args=[
+            "--disable-timestamp",
+            "--all-exports-scope",
+            "children",
+        ],
+        expected_directory=EXPECTED_MAIN_PATH / "openapi" / "all_exports_no_child",
+    )
+
+
+def test_all_exports_scope_children_with_local_models(output_dir: Path) -> None:
+    """Test --all-exports-scope=children when __init__.py has both local models and child exports."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "all_exports_with_local_models.yaml",
+        output_path=output_dir,
+        input_file_type="openapi",
+        extra_args=[
+            "--disable-timestamp",
+            "--all-exports-scope",
+            "children",
+        ],
+        expected_directory=EXPECTED_MAIN_PATH / "openapi" / "all_exports_with_local_models",
+    )
+
+
+def test_check_respects_pyproject_toml_settings(tmp_path: Path) -> None:
+    """Test --check uses pyproject.toml formatter settings from output path."""
+    pyproject_toml = tmp_path / "pyproject.toml"
+    pyproject_toml.write_text("[tool.black]\nline-length = 60\n", encoding="utf-8")
+
+    input_json = tmp_path / "input.json"
+    input_json.write_text(
+        """{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "title": "Person",
+  "type": "object",
+  "properties": {
+    "firstName": {"type": "string", "description": "The person's first name description that is very long."}
+  }
+}""",
+        encoding="utf-8",
+    )
+
+    output_file = tmp_path / "output.py"
+    run_main_and_assert(
+        input_path=input_json,
+        output_path=output_file,
+        input_file_type="jsonschema",
+        extra_args=["--disable-timestamp"],
+    )
+
+    run_main_and_assert(
+        input_path=input_json,
+        output_path=output_file,
+        input_file_type="jsonschema",
+        extra_args=["--disable-timestamp", "--check"],
+        expected_exit=Exit.OK,
+    )
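+
+# Illustrative note: line-length = 60 is short enough that the long field
+# description above forces black to wrap lines, so the --check run can only
+# pass if it picks up the same [tool.black] settings the generation run used.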
+
+
+def test_use_specialized_enum_requires_python_311(
+    tmp_path: Path,
+    capsys: pytest.CaptureFixture[str],
+) -> None:
+    """Test --use-specialized-enum requires --target-python-version 3.11+."""
+    input_json = tmp_path / "input.json"
+    input_json.write_text(
+        '{"type": "string", "enum": ["A", "B"]}',
+        encoding="utf-8",
+    )
+
+    run_main_and_assert(
+        input_path=input_json,
+        output_path=tmp_path / "output.py",
+        input_file_type="jsonschema",
+        extra_args=["--use-specialized-enum"],
+        expected_exit=Exit.ERROR,
+        capsys=capsys,
+        expected_stderr_contains="--use-specialized-enum requires --target-python-version 3.11 or later",
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "22",
+    reason="Installed black doesn't support StrEnum formatting",
+)
+def test_use_specialized_enum_with_python_311_ok(output_file: Path) -> None:
+    """Test --use-specialized-enum works with --target-python-version 3.11."""
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "string_enum.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        extra_args=["--use-specialized-enum", "--target-python-version", "3.11"],
+        assert_func=assert_file_content,
+        expected_file="use_specialized_enum_py311.py",
+    )
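+
+# Illustrative sketch (class and member names assumed): for a string enum
+# such as {"type": "string", "enum": ["A", "B"]}, --use-specialized-enum on
+# Python 3.11+ is expected to emit a specialized enum base like StrEnum
+# instead of a plain Enum:
+#
+#     from enum import StrEnum
+#
+#     class StringEnum(StrEnum):
+#         A = "A"
+#         B = "B"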
+
+
+def test_use_specialized_enum_pyproject_requires_python_311(
+    tmp_path: Path,
+    capsys: pytest.CaptureFixture[str],
+) -> None:
+    """Test use_specialized_enum in pyproject.toml requires target_python_version 3.11+."""
+    pyproject_toml = tmp_path / "pyproject.toml"
+    pyproject_toml.write_text(
+        "[tool.datamodel-codegen]\nuse_specialized_enum = true\n",
+        encoding="utf-8",
+    )
+
+    input_json = tmp_path / "input.json"
+    input_json.write_text(
+        '{"type": "string", "enum": ["A", "B"]}',
+        encoding="utf-8",
+    )
+
+    with chdir(tmp_path):
+        run_main_and_assert(
+            input_path=input_json,
+            output_path=tmp_path / "output.py",
+            input_file_type="jsonschema",
+            expected_exit=Exit.ERROR,
+            capsys=capsys,
+            expected_stderr_contains="--use-specialized-enum requires --target-python-version 3.11 or later",
+        )
+
+
+def test_use_specialized_enum_pyproject_override_with_cli(output_file: Path, tmp_path: Path) -> None:
+    """Test --no-use-specialized-enum CLI can override pyproject.toml use_specialized_enum=true."""
+    pyproject_toml = tmp_path / "pyproject.toml"
+    pyproject_toml.write_text(
+        "[tool.datamodel-codegen]\nuse_specialized_enum = true\n",
+        encoding="utf-8",
+    )
+
+    with chdir(tmp_path):
+        run_main_and_assert(
+            input_path=JSON_SCHEMA_DATA_PATH / "string_enum.json",
+            output_path=output_file,
+            input_file_type="jsonschema",
+            extra_args=["--no-use-specialized-enum"],
+            assert_func=assert_file_content,
+            expected_file="no_use_specialized_enum.py",
+        )
+
+
+@pytest.mark.cli_doc(
+    options=["--module-split-mode"],
+    input_schema="jsonschema/module_split_single/input.json",
+    cli_args=["--module-split-mode", "single", "--all-exports-scope", "recursive", "--use-exact-imports"],
+    golden_output="jsonschema/module_split_single",
+    related_options=["--all-exports-scope", "--use-exact-imports"],
+)
+def test_module_split_mode_single(output_dir: Path) -> None:
+    """Split generated models into separate files, one per model class.
+
+    The `--module-split-mode=single` flag generates each model class in its own file,
+    named after the class in snake_case. Use with `--all-exports-scope=recursive` to
+    create an __init__.py that re-exports all models for convenient imports.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "module_split_single" / "input.json",
+        output_path=output_dir,
+        input_file_type="jsonschema",
+        extra_args=[
+            "--disable-timestamp",
+            "--module-split-mode",
+            "single",
+            "--all-exports-scope",
+            "recursive",
+            "--use-exact-imports",
+        ],
+        expected_directory=EXPECTED_MAIN_PATH / "jsonschema" / "module_split_single",
+    )
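+
+# Illustrative sketch (model names assumed): for an input defining Pet and
+# PetOwner, --module-split-mode=single plausibly yields one file per class,
+# named in snake_case, plus a re-exporting package root:
+#
+#     output/
+#         __init__.py   # __all__ = ["Pet", "PetOwner"]
+#         pet.py        # class Pet
+#         pet_owner.py  # class PetOwner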
diff -pruN 0.26.4-3/tests/main/test_main_json.py 0.45.0-1/tests/main/test_main_json.py
--- 0.26.4-3/tests/main/test_main_json.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/main/test_main_json.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,179 @@
+"""Tests for JSON input file code generation."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+from unittest.mock import call
+
+import black
+import pytest
+from packaging import version
+
+from datamodel_code_generator import chdir
+from datamodel_code_generator.__main__ import Exit
+from tests.conftest import create_assert_file_content
+from tests.main.conftest import (
+    EXPECTED_JSON_PATH,
+    JSON_DATA_PATH,
+    run_main_and_assert,
+    run_main_url_and_assert,
+)
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+    from pytest_mock import MockerFixture
+
+
+assert_file_content = create_assert_file_content(EXPECTED_JSON_PATH)
+
+
+@pytest.mark.cli_doc(
+    options=["--input-file-type"],
+    input_schema="json/pet.json",
+    cli_args=["--input-file-type", "json"],
+    golden_output="json/general.py",
+)
+def test_main_json(output_file: Path) -> None:
+    """Specify the input file type for code generation.
+
+    The `--input-file-type` flag explicitly sets the input format when it cannot
+    be auto-detected from the file extension. Supported types: openapi, jsonschema,
+    json, yaml, csv, graphql.
+    """
+    run_main_and_assert(
+        input_path=JSON_DATA_PATH / "pet.json",
+        output_path=output_file,
+        input_file_type="json",
+        assert_func=assert_file_content,
+        expected_file="general.py",
+    )
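+
+# Illustrative usage sketch (assumed invocation, mirroring the cli_doc args):
+#
+#     datamodel-codegen --input pet.json --input-file-type json --output model.py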
+
+
+def test_space_and_special_characters_json(output_file: Path) -> None:
+    """Test JSON code generation with space and special characters."""
+    run_main_and_assert(
+        input_path=JSON_DATA_PATH / "space_and_special_characters.json",
+        output_path=output_file,
+        input_file_type="json",
+        assert_func=assert_file_content,
+        expected_file="space_and_special_characters.py",
+    )
+
+
+def test_main_json_failed(output_file: Path, capsys: pytest.CaptureFixture[str]) -> None:
+    """Test JSON code generation with broken input file."""
+    run_main_and_assert(
+        input_path=JSON_DATA_PATH / "broken.json",
+        output_path=output_file,
+        input_file_type="json",
+        expected_exit=Exit.ERROR,
+        capsys=capsys,
+        expected_stderr_contains="Invalid file format",
+    )
+
+
+def test_main_json_array_include_null(output_file: Path) -> None:
+    """Test JSON code generation with arrays including null values."""
+    run_main_and_assert(
+        input_path=JSON_DATA_PATH / "array_include_null.json",
+        output_path=output_file,
+        input_file_type="json",
+        assert_func=assert_file_content,
+    )
+
+
+def test_main_json_reuse_model(output_file: Path) -> None:
+    """Test JSON code generation with model reuse."""
+    run_main_and_assert(
+        input_path=JSON_DATA_PATH / "duplicate_models.json",
+        output_path=output_file,
+        input_file_type="json",
+        assert_func=assert_file_content,
+        extra_args=["--reuse-model"],
+    )
+
+
+def test_main_json_reuse_model_pydantic2(output_file: Path) -> None:
+    """Test JSON code generation with model reuse and Pydantic v2."""
+    run_main_and_assert(
+        input_path=JSON_DATA_PATH / "duplicate_models.json",
+        output_path=output_file,
+        input_file_type="json",
+        assert_func=assert_file_content,
+        extra_args=["--output-model-type", "pydantic_v2.BaseModel", "--reuse-model"],
+    )
+
+
+def test_simple_json_snake_case_field(output_file: Path) -> None:
+    """Test JSON code generation with snake case field naming."""
+    with chdir(JSON_DATA_PATH):
+        run_main_and_assert(
+            input_path=JSON_DATA_PATH / "simple.json",
+            output_path=output_file,
+            input_file_type="json",
+            assert_func=assert_file_content,
+            extra_args=["--snake-case-field"],
+        )
+
+
+def test_main_http_json(mocker: MockerFixture, output_file: Path) -> None:
+    """Test JSON code generation from HTTP URL."""
+
+    def get_mock_response(path: str) -> mocker.Mock:
+        mock = mocker.Mock()
+        mock.text = (JSON_DATA_PATH / path).read_text()
+        return mock
+
+    httpx_get_mock = mocker.patch(
+        "httpx.get",
+        side_effect=[
+            get_mock_response("pet.json"),
+        ],
+    )
+    run_main_url_and_assert(
+        url="https://example.com/pet.json",
+        output_path=output_file,
+        input_file_type="json",
+        assert_func=assert_file_content,
+        expected_file="general.py",
+        transform=lambda s: s.replace(
+            "#   filename:  https://example.com/pet.json",
+            "#   filename:  pet.json",
+        ),
+    )
+    httpx_get_mock.assert_has_calls([
+        call(
+            "https://example.com/pet.json",
+            headers=None,
+            verify=True,
+            follow_redirects=True,
+            params=None,
+        ),
+    ])
+
+
+@pytest.mark.skipif(
+    version.parse(black.__version__) < version.parse("23.3.0"),
+    reason="Require Black version 23.3.0 or later ",
+)
+def test_main_typed_dict_space_and_special_characters(output_file: Path) -> None:
+    """Test TypedDict generation with space and special characters."""
+    run_main_and_assert(
+        input_path=JSON_DATA_PATH / "space_and_special_characters.json",
+        output_path=output_file,
+        input_file_type="json",
+        assert_func=assert_file_content,
+        extra_args=["--output-model-type", "typing.TypedDict", "--target-python-version", "3.11"],
+    )
+
+
+def test_main_json_snake_case_field(output_file: Path) -> None:
+    """Test JSON code generation with snake case field naming."""
+    run_main_and_assert(
+        input_path=JSON_DATA_PATH / "snake_case.json",
+        output_path=output_file,
+        input_file_type="json",
+        assert_func=assert_file_content,
+        extra_args=["--snake-case-field"],
+    )
diff -pruN 0.26.4-3/tests/main/test_main_watch.py 0.45.0-1/tests/main/test_main_watch.py
--- 0.26.4-3/tests/main/test_main_watch.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/main/test_main_watch.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,254 @@
+"""Tests for watch mode functionality."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+from unittest.mock import MagicMock, patch
+
+import pytest
+
+from datamodel_code_generator.__main__ import Exit
+from tests.main.conftest import JSON_SCHEMA_DATA_PATH, run_main_with_args
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+    from pytest_mock import MockerFixture
+
+
+@pytest.mark.cli_doc(
+    options=["--watch"],
+    input_schema="jsonschema/person.json",
+    cli_args=["--watch", "--check"],
+    expected_stdout="Error: --watch and --check cannot be used together",
+)
+def test_watch_with_check_error(output_file: Path) -> None:
+    """Watch mode cannot be used with --check mode.
+
+    The `--watch` flag enables file watching for automatic regeneration.
+    It cannot be combined with `--check` since check mode requires a single
+    comparison, not continuous watching.
+    """
+    return_code = run_main_with_args(
+        [
+            "--watch",
+            "--check",
+            "--input",
+            str(JSON_SCHEMA_DATA_PATH / "person.json"),
+            "--output",
+            str(output_file),
+        ],
+        expected_exit=Exit.ERROR,
+    )
+    assert return_code == Exit.ERROR
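+
+# Illustrative usage sketch (assumed invocation): the conflicting combination
+# exercised above corresponds to, e.g.,
+#
+#     datamodel-codegen --input person.json --output model.py --watch --check
+#
+# which is expected to exit with an error instead of starting the watcher.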
+
+
+@pytest.mark.cli_doc(
+    options=["--watch"],
+    cli_args=["--watch", "--url", "https://example.com/schema.json"],
+    expected_stdout="Error: --watch requires --input file path",
+)
+def test_watch_with_url_error() -> None:
+    """Watch mode requires a file path input, not a URL.
+
+    The `--watch` flag monitors local files for changes. It cannot be used
+    with `--url` because a remote resource cannot be watched.
+    """
+    return_code = run_main_with_args(
+        [
+            "--watch",
+            "--url",
+            "https://example.com/schema.json",
+        ],
+        expected_exit=Exit.ERROR,
+    )
+    assert return_code == Exit.ERROR
+
+
+def test_watch_without_input_error(mocker: MockerFixture) -> None:
+    """Watch mode requires --input file path."""
+    mocker.patch("sys.stdin.isatty", return_value=False)
+    mocker.patch("sys.stdin.read", return_value='{"type": "object"}')
+    return_code = run_main_with_args(
+        ["--watch"],
+        expected_exit=Exit.ERROR,
+    )
+    assert return_code == Exit.ERROR
+
+
+def test_watch_without_watchfiles_installed(output_file: Path, mocker: MockerFixture) -> None:
+    """Test error message when watchfiles is not installed."""
+    mocker.patch.dict("sys.modules", {"watchfiles": None})
+    mocker.patch(
+        "datamodel_code_generator.watch._get_watchfiles",
+        side_effect=Exception("Please run `pip install 'datamodel-code-generator[watch]'` to use watch mode"),
+    )
+    return_code = run_main_with_args(
+        [
+            "--watch",
+            "--input",
+            str(JSON_SCHEMA_DATA_PATH / "person.json"),
+            "--output",
+            str(output_file),
+        ],
+        expected_exit=Exit.ERROR,
+    )
+    assert return_code == Exit.ERROR
+
+
+def test_get_watchfiles_import_error() -> None:
+    """Test _get_watchfiles raises exception when watchfiles is not installed."""
+    from datamodel_code_generator.watch import _get_watchfiles
+
+    with patch.dict("sys.modules", {"watchfiles": None}), pytest.raises(Exception, match="pip install"):
+        _get_watchfiles()
+
+
+def test_get_watchfiles_success() -> None:
+    """Test _get_watchfiles returns watchfiles module when installed."""
+    from datamodel_code_generator.watch import _get_watchfiles
+
+    result = _get_watchfiles()
+    assert result is not None
+    assert hasattr(result, "watch")
+
+
+@pytest.mark.cli_doc(
+    options=["--watch", "--watch-delay"],
+    input_schema="jsonschema/person.json",
+    cli_args=["--watch", "--watch-delay", "1.5"],
+    expected_stdout="Watching",
+)
+def test_watch_and_regenerate_starts_and_stops() -> None:
+    """Watch mode starts file watcher and handles clean exit.
+
+    The `--watch` flag starts a file watcher that monitors the input file
+    or directory for changes. The `--watch-delay` option sets the debounce
+    delay in seconds (default: 0.5) so that a burst of rapid file changes
+    triggers only a single regeneration. Press Ctrl+C to stop watching.
+    """
+    from datamodel_code_generator.__main__ import Config
+    from datamodel_code_generator.watch import watch_and_regenerate
+
+    mock_watchfiles = MagicMock()
+    mock_watchfiles.watch.return_value = iter([])
+    config = Config(input=str(JSON_SCHEMA_DATA_PATH / "person.json"), watch_delay=0.5)
+
+    with patch(
+        "datamodel_code_generator.watch._get_watchfiles",
+        return_value=mock_watchfiles,
+    ):
+        result = watch_and_regenerate(config, None, None, None)
+        assert result == Exit.OK
+        mock_watchfiles.watch.assert_called_once()
+        call_kwargs = mock_watchfiles.watch.call_args.kwargs
+        assert call_kwargs.get("debounce") == 500
+        assert call_kwargs.get("recursive") is False
+
+
+def test_watch_and_regenerate_without_input() -> None:
+    """Test watch_and_regenerate returns error when input is None."""
+    from datamodel_code_generator.__main__ import Config
+    from datamodel_code_generator.watch import watch_and_regenerate
+
+    mock_watchfiles = MagicMock()
+    config = Config(input=None)
+
+    with patch(
+        "datamodel_code_generator.watch._get_watchfiles",
+        return_value=mock_watchfiles,
+    ):
+        result = watch_and_regenerate(config, None, None, None)
+        assert result == Exit.ERROR
+
+
+def test_watch_and_regenerate_with_directory() -> None:
+    """Test that watch_and_regenerate handles directory input with recursive watching."""
+    from datamodel_code_generator.__main__ import Config
+    from datamodel_code_generator.watch import watch_and_regenerate
+
+    mock_watchfiles = MagicMock()
+    mock_watchfiles.watch.return_value = iter([])
+    config = Config(input=str(JSON_SCHEMA_DATA_PATH), watch_delay=0.1)
+
+    with patch(
+        "datamodel_code_generator.watch._get_watchfiles",
+        return_value=mock_watchfiles,
+    ):
+        result = watch_and_regenerate(config, None, None, None)
+        assert result == Exit.OK
+        call_kwargs = mock_watchfiles.watch.call_args.kwargs
+        assert call_kwargs.get("recursive") is True
+
+
+def test_watch_and_regenerate_handles_keyboard_interrupt() -> None:
+    """Test that watch_and_regenerate handles KeyboardInterrupt gracefully."""
+    from datamodel_code_generator.__main__ import Config
+    from datamodel_code_generator.watch import watch_and_regenerate
+
+    mock_watchfiles = MagicMock()
+    mock_watchfiles.watch.side_effect = KeyboardInterrupt()
+    config = Config(input=str(JSON_SCHEMA_DATA_PATH / "person.json"))
+
+    with patch(
+        "datamodel_code_generator.watch._get_watchfiles",
+        return_value=mock_watchfiles,
+    ):
+        result = watch_and_regenerate(config, None, None, None)
+        assert result == Exit.OK
+
+
+def test_watch_and_regenerate_on_change(tmp_path: Path) -> None:
+    """Test that watch_and_regenerate calls generate on file change."""
+    from datamodel_code_generator.__main__ import Config
+    from datamodel_code_generator.watch import watch_and_regenerate
+
+    output_file = tmp_path / "output.py"
+    mock_watchfiles = MagicMock()
+    mock_watchfiles.watch.return_value = iter([
+        {("modified", str(JSON_SCHEMA_DATA_PATH / "person.json"))},
+    ])
+    config = Config(
+        input=str(JSON_SCHEMA_DATA_PATH / "person.json"),
+        output=output_file,
+    )
+    mock_generate = MagicMock()
+
+    with (
+        patch(
+            "datamodel_code_generator.watch._get_watchfiles",
+            return_value=mock_watchfiles,
+        ),
+        patch(
+            "datamodel_code_generator.__main__.run_generate_from_config",
+            mock_generate,
+        ),
+    ):
+        result = watch_and_regenerate(config, None, None, None)
+        assert result == Exit.OK
+        mock_generate.assert_called_once()
+
+
+def test_watch_and_regenerate_handles_generation_error(capsys: pytest.CaptureFixture[str]) -> None:
+    """Test that watch_and_regenerate continues after generation error."""
+    from datamodel_code_generator.__main__ import Config
+    from datamodel_code_generator.watch import watch_and_regenerate
+
+    mock_watchfiles = MagicMock()
+    mock_watchfiles.watch.return_value = iter([
+        {("modified", str(JSON_SCHEMA_DATA_PATH / "person.json"))},
+    ])
+    config = Config(input=str(JSON_SCHEMA_DATA_PATH / "person.json"))
+
+    with (
+        patch(
+            "datamodel_code_generator.watch._get_watchfiles",
+            return_value=mock_watchfiles,
+        ),
+        patch(
+            "datamodel_code_generator.__main__.run_generate_from_config",
+            side_effect=Exception("Generation failed"),
+        ),
+    ):
+        result = watch_and_regenerate(config, None, None, None)
+        assert result == Exit.OK
+        captured = capsys.readouterr()
+        assert "Generation failed" in captured.err
diff -pruN 0.26.4-3/tests/main/test_main_yaml.py 0.45.0-1/tests/main/test_main_yaml.py
--- 0.26.4-3/tests/main/test_main_yaml.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/main/test_main_yaml.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,43 @@
+"""Tests for YAML input file code generation."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import pytest
+
+from datamodel_code_generator.__main__ import Exit
+from tests.conftest import create_assert_file_content
+from tests.main.conftest import (
+    EXPECTED_MAIN_PATH,
+    YAML_DATA_PATH,
+    run_main_and_assert,
+)
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+assert_file_content = create_assert_file_content(EXPECTED_MAIN_PATH)
+
+
+@pytest.mark.benchmark
+def test_main_yaml(output_file: Path) -> None:
+    """Test YAML input file code generation."""
+    run_main_and_assert(
+        input_path=YAML_DATA_PATH / "pet.yaml",
+        output_path=output_file,
+        input_file_type="yaml",
+        assert_func=assert_file_content,
+    )
+
+
+def test_main_yaml_invalid_root_list(output_file: Path, capsys: pytest.CaptureFixture[str]) -> None:
+    """Test YAML file with list as root element fails with invalid file format error."""
+    run_main_and_assert(
+        input_path=YAML_DATA_PATH / "invalid_root_list.yaml",
+        output_path=output_file,
+        input_file_type="yaml",
+        expected_exit=Exit.ERROR,
+        capsys=capsys,
+        expected_stderr_contains="Invalid file format",
+    )
diff -pruN 0.26.4-3/tests/main/test_types.py 0.45.0-1/tests/main/test_types.py
--- 0.26.4-3/tests/main/test_types.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/main/test_types.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,63 @@
+"""Tests for DataType import generation."""
+
+from __future__ import annotations
+
+from datamodel_code_generator.format import PythonVersionMin
+from datamodel_code_generator.imports import (
+    IMPORT_LITERAL,
+    IMPORT_OPTIONAL,
+)
+from datamodel_code_generator.types import DataType
+
+
+def test_imports_with_literal_one() -> None:
+    """Test imports for a DataType with single literal value."""
+    data_type = DataType(literals=[""], python_version=PythonVersionMin)
+
+    # Convert iterator to list for assertion
+    imports = list(data_type.imports)
+    assert IMPORT_LITERAL in imports
+    assert len(imports) == 1
+
+
+def test_imports_with_literal_one_and_optional() -> None:
+    """Test imports for an optional DataType with single literal value."""
+    data_type = DataType(literals=[""], is_optional=True, python_version=PythonVersionMin)
+
+    imports = list(data_type.imports)
+    assert IMPORT_LITERAL in imports
+    assert IMPORT_OPTIONAL in imports
+    assert len(imports) == 2
+
+
+def test_imports_with_literal_empty() -> None:
+    """Test imports for a DataType with no literal values."""
+    data_type = DataType(literals=[], python_version=PythonVersionMin)
+
+    imports = list(data_type.imports)
+    assert len(imports) == 0
+
+
+def test_imports_with_nested_dict_key() -> None:
+    """Test imports for a DataType with dict_key containing literals."""
+    dict_key_type = DataType(literals=["key"], python_version=PythonVersionMin)
+
+    data_type = DataType(python_version=PythonVersionMin, dict_key=dict_key_type)
+
+    imports = list(data_type.imports)
+    assert IMPORT_LITERAL in imports
+    assert len(imports) == 1
+
+
+def test_imports_without_duplicate_literals() -> None:
+    """Test that literal import is not duplicated."""
+    dict_key_type = DataType(literals=["key1"], python_version=PythonVersionMin)
+
+    data_type = DataType(
+        literals=["key2"],
+        python_version=PythonVersionMin,
+        dict_key=dict_key_type,
+    )
+
+    imports = list(data_type.imports)
+    assert IMPORT_LITERAL in imports
diff -pruN 0.26.4-3/tests/model/__init__.py 0.45.0-1/tests/model/__init__.py
--- 0.26.4-3/tests/model/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/model/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1 @@
+"""Model unit tests package."""
diff -pruN 0.26.4-3/tests/model/dataclass/__init__.py 0.45.0-1/tests/model/dataclass/__init__.py
--- 0.26.4-3/tests/model/dataclass/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/model/dataclass/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1 @@
+"""Dataclass model unit tests package."""
diff -pruN 0.26.4-3/tests/model/dataclass/test_param.py 0.45.0-1/tests/model/dataclass/test_param.py
--- 0.26.4-3/tests/model/dataclass/test_param.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/model/dataclass/test_param.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,232 @@
+"""Tests for dataclass parameter options (frozen, keyword_only)."""
+
+from __future__ import annotations
+
+from datamodel_code_generator.model.dataclass import DataClass, DataModelField
+from datamodel_code_generator.reference import Reference
+from datamodel_code_generator.types import DataType
+
+
+def test_dataclass_without_frozen() -> None:
+    """Test dataclass generation without frozen parameter."""
+    reference = Reference(path="TestModel", name="TestModel")
+    field = DataModelField(
+        name="field1",
+        data_type=DataType(type="str"),
+        required=True,
+    )
+
+    dataclass = DataClass(
+        reference=reference,
+        fields=[field],
+        frozen=False,
+    )
+
+    rendered = dataclass.render()
+    assert "@dataclass" in rendered
+    assert "frozen=True" not in rendered
+    assert "class TestModel:" in rendered
+
+
+def test_dataclass_with_frozen() -> None:
+    """Test dataclass generation with frozen=True."""
+    reference = Reference(path="TestModel", name="TestModel")
+    field = DataModelField(
+        name="field1",
+        data_type=DataType(type="str"),
+        required=True,
+    )
+
+    dataclass = DataClass(
+        reference=reference,
+        fields=[field],
+        frozen=True,
+    )
+
+    rendered = dataclass.render()
+    assert "@dataclass(frozen=True)" in rendered
+    assert "class TestModel:" in rendered
+
+
+def test_dataclass_with_keyword_only_and_frozen() -> None:
+    """Test dataclass generation with both keyword_only and frozen parameters."""
+    reference = Reference(path="TestModel", name="TestModel")
+    field = DataModelField(
+        name="field1",
+        data_type=DataType(type="str"),
+        required=True,
+    )
+
+    dataclass = DataClass(
+        reference=reference,
+        fields=[field],
+        keyword_only=True,
+        frozen=True,
+    )
+
+    rendered = dataclass.render()
+    assert "@dataclass" in rendered
+    assert "kw_only=True" in rendered
+    assert "frozen=True" in rendered
+    assert "class TestModel:" in rendered
+
+
+def test_dataclass_with_only_keyword_only() -> None:
+    """Test dataclass generation with only keyword_only parameter."""
+    reference = Reference(path="TestModel", name="TestModel")
+    field = DataModelField(
+        name="field1",
+        data_type=DataType(type="str"),
+        required=True,
+    )
+
+    dataclass = DataClass(
+        reference=reference,
+        fields=[field],
+        keyword_only=True,
+        frozen=False,
+    )
+
+    rendered = dataclass.render()
+    assert "@dataclass" in rendered
+    assert "kw_only=True" in rendered
+    assert "frozen=True" not in rendered
+    assert "class TestModel:" in rendered
+
+
+def test_dataclass_frozen_attribute() -> None:
+    """Test that frozen attribute is properly stored."""
+    reference = Reference(path="TestModel", name="TestModel")
+    dataclass = DataClass(
+        reference=reference,
+        fields=[],
+        frozen=True,
+    )
+
+    assert dataclass.frozen is True
+
+
+def test_dataclass_frozen_false_attribute() -> None:
+    """Test that frozen attribute defaults to False."""
+    reference = Reference(path="TestModel", name="TestModel")
+
+    dataclass = DataClass(
+        reference=reference,
+        fields=[],
+    )
+
+    assert dataclass.frozen is False
+
+
+def test_dataclass_kw_only_true_only() -> None:
+    """Test dataclass generation with kw_only=True only (comprehensive test)."""
+    reference = Reference(path="TestModel", name="TestModel")
+    field1 = DataModelField(
+        name="field1",
+        data_type=DataType(type="str"),
+        required=True,
+    )
+    field2 = DataModelField(
+        name="field2",
+        data_type=DataType(type="int"),
+        required=False,
+    )
+
+    dataclass = DataClass(
+        reference=reference,
+        fields=[field1, field2],
+        keyword_only=True,
+    )
+
+    rendered = dataclass.render()
+    # Should have @dataclass(kw_only=True) but not frozen=True
+    assert "@dataclass(kw_only=True)" in rendered
+    assert "frozen=True" not in rendered
+    assert "class TestModel:" in rendered
+
+    # Verify frozen attribute is False (default)
+    assert dataclass.frozen is False
+    assert dataclass.keyword_only is True
+
+
+def test_dataclass_legacy_keyword_only() -> None:
+    """Test that legacy 'frozen' argument is supported if dataclass_arguments is not set."""
+    reference = Reference(path="TestModel", name="TestModel")
+    field = DataModelField(
+        name="field1",
+        data_type=DataType(type="str"),
+        required=True,
+    )
+
+    dataclass = DataClass(
+        reference=reference,
+        fields=[field],
+        keyword_only=True,
+    )
+
+    rendered = dataclass.render()
+    assert "@dataclass(kw_only=True)" in rendered
+
+
+def test_dataclass_legacy_frozen() -> None:
+    """Test that legacy 'frozen' argument is supported if dataclass_arguments is not set."""
+    reference = Reference(path="TestModel", name="TestModel")
+    field = DataModelField(
+        name="field1",
+        data_type=DataType(type="str"),
+        required=True,
+    )
+
+    dataclass = DataClass(
+        reference=reference,
+        fields=[field],
+        frozen=True,
+    )
+
+    rendered = dataclass.render()
+    assert "@dataclass(frozen=True)" in rendered
+
+
+def test_dataclass_with_custom_dataclass_arguments() -> None:
+    """Test that custom dataclass_arguments are rendered correctly."""
+    reference = Reference(path="TestModel", name="TestModel")
+    field = DataModelField(
+        name="field1",
+        data_type=DataType(type="str"),
+        required=True,
+    )
+
+    dataclass = DataClass(
+        reference=reference,
+        fields=[field],
+        dataclass_arguments={"slots": True, "repr": False, "order": True},
+    )
+
+    rendered = dataclass.render()
+    assert "@dataclass(slots=True, order=True)" in rendered
+    assert "repr=False" not in rendered
+
+
+def test_dataclass_both_legacy_and_dataclass_arguments() -> None:
+    """Test that dataclass_arguments take precedence over legacy flags."""
+    reference = Reference(path="TestModel", name="TestModel")
+    field = DataModelField(
+        name="field1",
+        data_type=DataType(type="str"),
+        required=True,
+    )
+
+    dataclass = DataClass(
+        reference=reference,
+        fields=[field],
+        frozen=True,  # legacy flag
+        keyword_only=True,  # legacy flag
+        dataclass_arguments={"frozen": False, "order": True},
+    )
+
+    rendered = dataclass.render()
+    assert "@dataclass(order=True)" in rendered
+    assert "@dataclass(frozen=False)" not in rendered
+    assert "@dataclass(frozen=True)" not in rendered
+    assert "@dataclass(kw_only=False)" not in rendered
+    assert "@dataclass(kw_only=True)" not in rendered
diff -pruN 0.26.4-3/tests/model/pydantic/__init__.py 0.45.0-1/tests/model/pydantic/__init__.py
--- 0.26.4-3/tests/model/pydantic/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/model/pydantic/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1 @@
+"""Pydantic v1 model unit tests package."""
diff -pruN 0.26.4-3/tests/model/pydantic/test_base_model.py 0.45.0-1/tests/model/pydantic/test_base_model.py
--- 0.26.4-3/tests/model/pydantic/test_base_model.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/model/pydantic/test_base_model.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,150 @@
+"""Tests for Pydantic v1 BaseModel generation."""
+
+from __future__ import annotations
+
+from typing import Any
+
+import pytest
+
+from datamodel_code_generator.model.pydantic import DataTypeManager
+from datamodel_code_generator.model.pydantic.base_model import BaseModel, DataModelField
+from datamodel_code_generator.reference import Reference
+from datamodel_code_generator.types import DataType, Types
+
+
+def test_base_model() -> None:
+    """Test basic BaseModel generation with required field."""
+    field = DataModelField(name="a", data_type=DataType(type="str"), required=True)
+
+    base_model = BaseModel(
+        fields=[field],
+        reference=Reference(name="test_model", path="test_model"),
+    )
+
+    assert base_model.name == "test_model"
+    assert base_model.fields == [field]
+    assert base_model.decorators == []
+    assert base_model.render() == "class test_model(BaseModel):\n    a: str"
+
+
+def test_base_model_optional() -> None:
+    """Test BaseModel generation with optional field and default value."""
+    field = DataModelField(name="a", data_type=DataType(type="str"), default="abc", required=False)
+
+    base_model = BaseModel(
+        fields=[field],
+        reference=Reference(name="test_model", path="test_model"),
+    )
+
+    assert base_model.name == "test_model"
+    assert base_model.fields == [field]
+    assert base_model.decorators == []
+    assert base_model.render() == "class test_model(BaseModel):\n    a: Optional[str] = 'abc'"
+
+
+def test_base_model_nullable_required() -> None:
+    """Test BaseModel generation with nullable required field."""
+    field = DataModelField(
+        name="a",
+        data_type=DataType(type="str"),
+        default="abc",
+        required=True,
+        nullable=True,
+    )
+
+    base_model = BaseModel(
+        fields=[field],
+        reference=Reference(name="test_model", path="test_model"),
+    )
+
+    assert base_model.name == "test_model"
+    assert base_model.fields == [field]
+    assert base_model.decorators == []
+    assert base_model.render() == "class test_model(BaseModel):\n    a: Optional[str] = Field(...)"
+
+
+def test_base_model_strict_non_nullable_required() -> None:
+    """Test BaseModel generation with strict non-nullable required field."""
+    field = DataModelField(
+        name="a",
+        data_type=DataType(type="str"),
+        default="abc",
+        required=True,
+        nullable=False,
+    )
+
+    base_model = BaseModel(
+        fields=[field],
+        reference=Reference(name="test_model", path="test_model"),
+    )
+
+    assert base_model.name == "test_model"
+    assert base_model.fields == [field]
+    assert base_model.decorators == []
+    assert base_model.render() == "class test_model(BaseModel):\n    a: str"
+
+
+def test_base_model_decorator() -> None:
+    """Test BaseModel generation with decorators and base classes."""
+    field = DataModelField(name="a", data_type=DataType(type="str"), default="abc", required=False)
+
+    base_model = BaseModel(
+        fields=[field],
+        decorators=["@validate"],
+        base_classes=[Reference(name="Base", original_name="Base", path="Base")],
+        reference=Reference(name="test_model", path="test_model"),
+    )
+
+    assert base_model.name == "test_model"
+    assert base_model.fields == [field]
+    assert base_model.base_class == "Base"
+    assert base_model.decorators == ["@validate"]
+    assert base_model.render() == "@validate\nclass test_model(Base):\n    a: Optional[str] = 'abc'"
+
+
+def test_base_model_get_data_type() -> None:
+    """Test data type retrieval for BaseModel fields."""
+    data_type_manager = DataTypeManager()
+    assert data_type_manager.get_data_type(Types.integer) == data_type_manager.data_type(type="int")
+
+
+@pytest.mark.parametrize(
+    ("kwargs", "expected"),
+    [
+        ({"required": True}, None),
+        ({"required": True, "example": "example"}, None),
+        ({"example": "example"}, None),
+        (
+            {"required": True, "default": 123, "example": "example"},
+            None,
+        ),
+        (
+            {"required": False, "default": 123, "example": "example"},
+            None,
+        ),
+        ({"description": "description"}, None),
+        ({"title": "title"}, None),
+        ({"alias": "alias"}, "Field(None, alias='alias')"),
+        ({"example": True}, None),
+        ({"examples": True}, None),
+        (
+            {
+                "example": True,
+                "description": "description",
+                "title": "title",
+                "alias": "alias",
+            },
+            "Field(None, alias='alias')",
+        ),
+        ({"examples": [1, 2, 3]}, None),
+        (
+            {"examples": {"name": "dog", "age": 1}},
+            None,
+        ),
+        ({"default": "abc", "title": "title"}, None),
+        ({"default": 123, "title": "title"}, None),
+    ],
+)
+def test_data_model_field(kwargs: dict[str, Any], expected: str | None) -> None:
+    """Test DataModelField generation with various field attributes."""
+    assert DataModelField(**kwargs, data_type=DataType()).field == expected
diff -pruN 0.26.4-3/tests/model/pydantic/test_constraint.py 0.45.0-1/tests/model/pydantic/test_constraint.py
--- 0.26.4-3/tests/model/pydantic/test_constraint.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/model/pydantic/test_constraint.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+"""Tests for Pydantic constraint detection."""
+
+from __future__ import annotations
+
+import pytest
+
+from datamodel_code_generator.model.pydantic.base_model import Constraints
+from datamodel_code_generator.types import UnionIntFloat
+
+
+@pytest.mark.parametrize(
+    ("gt", "expected"),
+    [
+        (None, False),
+        (4, True),
+        (0, True),
+        (0.0, True),
+    ],
+)
+def test_constraint(gt: float | None, expected: bool) -> None:
+    """Test constraint detection with greater-than values."""
+    constraints = Constraints()
+    constraints.gt = UnionIntFloat(gt) if gt is not None else None
+    assert constraints.has_constraints == expected
diff -pruN 0.26.4-3/tests/model/pydantic/test_custom_root_type.py 0.45.0-1/tests/model/pydantic/test_custom_root_type.py
--- 0.26.4-3/tests/model/pydantic/test_custom_root_type.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/model/pydantic/test_custom_root_type.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,75 @@
+"""Tests for Pydantic v1 CustomRootType generation."""
+
+from __future__ import annotations
+
+from datamodel_code_generator.model import DataModelFieldBase
+from datamodel_code_generator.model.pydantic import DataTypeManager
+from datamodel_code_generator.model.pydantic.custom_root_type import CustomRootType
+from datamodel_code_generator.reference import Reference
+from datamodel_code_generator.types import DataType, Types
+
+
+def test_custom_root_type() -> None:
+    """Test CustomRootType generation with optional field."""
+    custom_root_type = CustomRootType(
+        fields=[
+            DataModelFieldBase(
+                name="a",
+                data_type=DataType(type="str"),
+                default="abc",
+                required=False,
+            )
+        ],
+        reference=Reference(name="test_model", path="test_model"),
+    )
+
+    assert custom_root_type.name == "test_model"
+    assert custom_root_type.fields == [
+        DataModelFieldBase(
+            name="a",
+            data_type=DataType(type="str"),
+            default="abc",
+            required=False,
+            parent=custom_root_type,
+        )
+    ]
+
+    assert custom_root_type.render() == ("class test_model(BaseModel):\n    __root__: Optional[str] = 'abc'")
+
+
+def test_custom_root_type_required() -> None:
+    """Test CustomRootType generation with required field."""
+    custom_root_type = CustomRootType(
+        fields=[DataModelFieldBase(data_type=DataType(type="str"), required=True)],
+        reference=Reference(name="test_model", path="test_model"),
+    )
+
+    assert custom_root_type.name == "test_model"
+    assert custom_root_type.fields == [
+        DataModelFieldBase(data_type=DataType(type="str"), required=True, parent=custom_root_type)
+    ]
+
+    assert custom_root_type.render() == ("class test_model(BaseModel):\n    __root__: str")
+
+
+def test_custom_root_type_decorator() -> None:
+    """Test CustomRootType generation with decorators and base classes."""
+    custom_root_type = CustomRootType(
+        fields=[DataModelFieldBase(data_type=DataType(type="str"), required=True)],
+        decorators=["@validate"],
+        base_classes=[Reference(name="Base", original_name="Base", path="Base")],
+        reference=Reference(name="test_model", path="test_model"),
+    )
+
+    assert custom_root_type.name == "test_model"
+    assert custom_root_type.fields == [
+        DataModelFieldBase(data_type=DataType(type="str"), required=True, parent=custom_root_type)
+    ]
+    assert custom_root_type.base_class == "Base"
+    assert custom_root_type.render() == "@validate\nclass test_model(Base):\n    __root__: str"
+
+
+def test_custom_root_type_get_data_type() -> None:
+    """Test data type retrieval for CustomRootType fields."""
+    data_type_manager = DataTypeManager()
+    assert data_type_manager.get_data_type(Types.integer) == data_type_manager.data_type(type="int")
diff -pruN 0.26.4-3/tests/model/pydantic/test_data_class.py 0.45.0-1/tests/model/pydantic/test_data_class.py
--- 0.26.4-3/tests/model/pydantic/test_data_class.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/model/pydantic/test_data_class.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,61 @@
+"""Tests for Pydantic dataclass generation."""
+
+from __future__ import annotations
+
+from datamodel_code_generator.model import DataModelFieldBase
+from datamodel_code_generator.model.pydantic import DataTypeManager
+from datamodel_code_generator.model.pydantic.dataclass import DataClass
+from datamodel_code_generator.reference import Reference
+from datamodel_code_generator.types import DataType, Types
+
+
+def test_data_class() -> None:
+    """Test basic DataClass generation with required field."""
+    field = DataModelFieldBase(name="a", data_type=DataType(type="str"), required=True)
+
+    data_class = DataClass(
+        fields=[field],
+        reference=Reference(name="test_model", path="test_model"),
+    )
+
+    assert data_class.name == "test_model"
+    assert data_class.fields == [field]
+    assert data_class.decorators == []
+    assert data_class.render() == "@dataclass\nclass test_model:\n    a: str"
+
+
+def test_data_class_base_class() -> None:
+    """Test DataClass generation with base class inheritance."""
+    field = DataModelFieldBase(name="a", data_type=DataType(type="str"), required=True)
+
+    data_class = DataClass(
+        fields=[field],
+        base_classes=[Reference(name="Base", original_name="Base", path="Base")],
+        reference=Reference(name="test_model", path="test_model"),
+    )
+
+    assert data_class.name == "test_model"
+    assert data_class.fields == [field]
+    assert data_class.decorators == []
+    assert data_class.render() == "@dataclass\nclass test_model(Base):\n    a: str"
+
+
+def test_data_class_optional() -> None:
+    """Test DataClass generation with field default value."""
+    field = DataModelFieldBase(name="a", data_type=DataType(type="str"), default="'abc'", required=True)
+
+    data_class = DataClass(
+        fields=[field],
+        reference=Reference(name="test_model", path="test_model"),
+    )
+
+    assert data_class.name == "test_model"
+    assert data_class.fields == [field]
+    assert data_class.decorators == []
+    assert data_class.render() == "@dataclass\nclass test_model:\n    a: str = 'abc'"
+
+
+def test_data_class_get_data_type() -> None:
+    """Test data type retrieval for DataClass fields."""
+    data_type_manager = DataTypeManager()
+    assert data_type_manager.get_data_type(Types.integer) == data_type_manager.data_type(type="int")
diff -pruN 0.26.4-3/tests/model/pydantic/test_types.py 0.45.0-1/tests/model/pydantic/test_types.py
--- 0.26.4-3/tests/model/pydantic/test_types.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/model/pydantic/test_types.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,492 @@
+"""Tests for Pydantic type generation and constraints."""
+
+from __future__ import annotations
+
+from decimal import Decimal
+from typing import Any
+
+import pytest
+
+from datamodel_code_generator.imports import Import
+from datamodel_code_generator.model.pydantic.imports import (
+    IMPORT_CONDECIMAL,
+    IMPORT_CONFLOAT,
+    IMPORT_CONINT,
+    IMPORT_CONSTR,
+    IMPORT_NEGATIVE_FLOAT,
+    IMPORT_NEGATIVE_INT,
+    IMPORT_NON_NEGATIVE_FLOAT,
+    IMPORT_NON_NEGATIVE_INT,
+    IMPORT_NON_POSITIVE_FLOAT,
+    IMPORT_NON_POSITIVE_INT,
+    IMPORT_POSITIVE_FLOAT,
+    IMPORT_POSITIVE_INT,
+)
+from datamodel_code_generator.model.pydantic.types import DataTypeManager
+from datamodel_code_generator.types import DataType, Types, UnionIntFloat
+
+
+@pytest.mark.parametrize(
+    ("types", "use_non_positive_negative_number_constrained_types", "params", "data_type"),
+    [
+        (Types.integer, False, {}, {"type": "int"}),
+        (
+            Types.integer,
+            False,
+            {"maximum": 10},
+            {
+                "type": "conint",
+                "is_func": True,
+                "kwargs": {"le": 10},
+                "import_": IMPORT_CONINT,
+            },
+        ),
+        (
+            Types.integer,
+            False,
+            {"exclusiveMaximum": 10},
+            {
+                "type": "conint",
+                "is_func": True,
+                "kwargs": {"lt": 10},
+                "import_": IMPORT_CONINT,
+            },
+        ),
+        (
+            Types.integer,
+            False,
+            {"minimum": 10},
+            {
+                "type": "conint",
+                "is_func": True,
+                "kwargs": {"ge": 10},
+                "import_": IMPORT_CONINT,
+            },
+        ),
+        (
+            Types.integer,
+            False,
+            {"exclusiveMinimum": 10},
+            {
+                "type": "conint",
+                "is_func": True,
+                "kwargs": {"gt": 10},
+                "import_": IMPORT_CONINT,
+            },
+        ),
+        (
+            Types.integer,
+            False,
+            {"multipleOf": 10},
+            {
+                "type": "conint",
+                "is_func": True,
+                "kwargs": {"multiple_of": 10},
+                "import_": IMPORT_CONINT,
+            },
+        ),
+        (
+            Types.integer,
+            False,
+            {"exclusiveMinimum": 0},
+            {"type": "PositiveInt", "import_": IMPORT_POSITIVE_INT},
+        ),
+        (
+            Types.integer,
+            False,
+            {"exclusiveMaximum": 0},
+            {"type": "NegativeInt", "import_": IMPORT_NEGATIVE_INT},
+        ),
+        (
+            Types.integer,
+            True,
+            {"minimum": 0},
+            {"type": "NonNegativeInt", "import_": IMPORT_NON_NEGATIVE_INT},
+        ),
+        (
+            Types.integer,
+            True,
+            {"maximum": 0},
+            {"type": "NonPositiveInt", "import_": IMPORT_NON_POSITIVE_INT},
+        ),
+        (
+            Types.integer,
+            False,
+            {"minimum": 0},
+            {
+                "type": "conint",
+                "is_func": True,
+                "kwargs": {"ge": 0},
+                "import_": IMPORT_CONINT,
+            },
+        ),
+        (
+            Types.integer,
+            False,
+            {"maximum": 0},
+            {
+                "type": "conint",
+                "is_func": True,
+                "kwargs": {"le": 0},
+                "import_": IMPORT_CONINT,
+            },
+        ),
+    ],
+)
+def test_get_data_int_type(
+    types: Types,
+    use_non_positive_negative_number_constrained_types: bool,
+    params: dict[str, Any],
+    data_type: dict[str, Any],
+) -> None:
+    """Test integer data type generation with various constraints."""
+    data_type_manager = DataTypeManager(
+        use_non_positive_negative_number_constrained_types=use_non_positive_negative_number_constrained_types
+    )
+    assert (
+        data_type_manager.get_data_int_type(types, **params).dict() == data_type_manager.data_type(**data_type).dict()
+    )
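+
+# Illustrative sketch (field name assumed): the mapping above mirrors how a
+# schema constraint such as {"type": "integer", "minimum": 0} plausibly
+# surfaces in generated Pydantic v1 code:
+#
+#     from pydantic import BaseModel, conint
+#
+#     class Model(BaseModel):
+#         count: conint(ge=0)
+#
+# With use_non_positive_negative_number_constrained_types=True the same
+# constraint maps to NonNegativeInt instead, as the cases above show.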
+
+
+@pytest.mark.parametrize(
+    ("types", "use_non_positive_negative_number_constrained_types", "params", "data_type"),
+    [
+        (Types.float, False, {}, {"type": "float"}),
+        (
+            Types.float,
+            False,
+            {"maximum": 10},
+            {
+                "type": "confloat",
+                "is_func": True,
+                "kwargs": {"le": 10},
+                "import_": IMPORT_CONFLOAT,
+            },
+        ),
+        (
+            Types.float,
+            False,
+            {"exclusiveMaximum": 10},
+            {
+                "type": "confloat",
+                "is_func": True,
+                "kwargs": {"lt": 10.0},
+                "import_": IMPORT_CONFLOAT,
+            },
+        ),
+        (
+            Types.float,
+            False,
+            {"minimum": 10},
+            {
+                "type": "confloat",
+                "is_func": True,
+                "kwargs": {"ge": 10.0},
+                "import_": IMPORT_CONFLOAT,
+            },
+        ),
+        (
+            Types.float,
+            False,
+            {"exclusiveMinimum": 10},
+            {
+                "type": "confloat",
+                "is_func": True,
+                "kwargs": {"gt": 10.0},
+                "import_": IMPORT_CONFLOAT,
+            },
+        ),
+        (
+            Types.float,
+            False,
+            {"multipleOf": 10},
+            {
+                "type": "confloat",
+                "is_func": True,
+                "kwargs": {"multiple_of": 10.0},
+                "import_": IMPORT_CONFLOAT,
+            },
+        ),
+        (
+            Types.float,
+            False,
+            {"exclusiveMinimum": 0},
+            {"type": "PositiveFloat", "import_": IMPORT_POSITIVE_FLOAT},
+        ),
+        (
+            Types.float,
+            False,
+            {"exclusiveMaximum": 0},
+            {"type": "NegativeFloat", "import_": IMPORT_NEGATIVE_FLOAT},
+        ),
+        (
+            Types.float,
+            True,
+            {"maximum": 0},
+            {"type": "NonPositiveFloat", "import_": IMPORT_NON_POSITIVE_FLOAT},
+        ),
+        (
+            Types.float,
+            True,
+            {"minimum": 0},
+            {"type": "NonNegativeFloat", "import_": IMPORT_NON_NEGATIVE_FLOAT},
+        ),
+        (
+            Types.float,
+            False,
+            {"maximum": 0},
+            {
+                "type": "confloat",
+                "is_func": True,
+                "kwargs": {"le": 0.0},
+                "import_": IMPORT_CONFLOAT,
+            },
+        ),
+        (
+            Types.float,
+            False,
+            {"minimum": 0},
+            {
+                "type": "confloat",
+                "is_func": True,
+                "kwargs": {"ge": 0.0},
+                "import_": IMPORT_CONFLOAT,
+            },
+        ),
+    ],
+)
+def test_get_data_float_type(
+    types: Types,
+    use_non_positive_negative_number_constrained_types: bool,
+    params: dict[str, Any],
+    data_type: dict[str, Any],
+) -> None:
+    """Test float data type generation with various constraints."""
+    data_type_manager = DataTypeManager(
+        use_non_positive_negative_number_constrained_types=use_non_positive_negative_number_constrained_types
+    )
+    assert data_type_manager.get_data_float_type(types, **params) == data_type_manager.data_type(**data_type)
+
+
+@pytest.mark.parametrize(
+    ("types", "params", "data_type"),
+    [
+        (
+            Types.decimal,
+            {},
+            {"type": "Decimal", "import_": Import(from_="decimal", import_="Decimal")},
+        ),
+        (
+            Types.decimal,
+            {"maximum": 10},
+            {
+                "type": "condecimal",
+                "is_func": True,
+                "kwargs": {"le": 10},
+                "import_": IMPORT_CONDECIMAL,
+            },
+        ),
+        (
+            Types.decimal,
+            {"exclusiveMaximum": 10},
+            {
+                "type": "condecimal",
+                "is_func": True,
+                "kwargs": {"lt": 10},
+                "import_": IMPORT_CONDECIMAL,
+            },
+        ),
+        (
+            Types.decimal,
+            {"minimum": 10},
+            {
+                "type": "condecimal",
+                "is_func": True,
+                "kwargs": {"ge": 10},
+                "import_": IMPORT_CONDECIMAL,
+            },
+        ),
+        (
+            Types.decimal,
+            {"exclusiveMinimum": 10},
+            {
+                "type": "condecimal",
+                "is_func": True,
+                "kwargs": {"gt": 10},
+                "import_": IMPORT_CONDECIMAL,
+            },
+        ),
+        (
+            Types.decimal,
+            {"multipleOf": 10},
+            {
+                "type": "condecimal",
+                "is_func": True,
+                "kwargs": {"multiple_of": 10},
+                "import_": IMPORT_CONDECIMAL,
+            },
+        ),
+        (
+            Types.decimal,
+            {"minimum": UnionIntFloat(10.01)},
+            {
+                "type": "condecimal",
+                "is_func": True,
+                "kwargs": {"ge": Decimal("10.01")},
+                "import_": IMPORT_CONDECIMAL,
+            },
+        ),
+    ],
+)
+def test_get_data_decimal_type(types: Types, params: dict[str, Any], data_type: dict[str, Any]) -> None:
+    """Test decimal data type generation with various constraints."""
+    data_type_manager = DataTypeManager()
+    assert data_type_manager.get_data_decimal_type(types, **params) == data_type_manager.data_type(**data_type)
+
+
+@pytest.mark.parametrize(
+    ("types", "params", "data_type"),
+    [
+        (
+            Types.float,
+            {"multipleOf": 0.1},
+            {
+                "type": "condecimal",
+                "is_func": True,
+                "kwargs": {"multiple_of": Decimal("0.1")},
+                "import_": IMPORT_CONDECIMAL,
+            },
+        ),
+        (
+            Types.float,
+            {"multipleOf": 0.1, "minimum": 0, "maximum": 100},
+            {
+                "type": "condecimal",
+                "is_func": True,
+                "kwargs": {"multiple_of": Decimal("0.1"), "ge": Decimal(0), "le": Decimal(100)},
+                "import_": IMPORT_CONDECIMAL,
+            },
+        ),
+        (
+            Types.number,
+            {"multipleOf": 0.01, "exclusiveMinimum": 0},
+            {
+                "type": "condecimal",
+                "is_func": True,
+                "kwargs": {"multiple_of": Decimal("0.01"), "gt": Decimal(0)},
+                "import_": IMPORT_CONDECIMAL,
+            },
+        ),
+    ],
+)
+def test_get_data_float_type_with_use_decimal_for_multiple_of(
+    types: Types, params: dict[str, Any], data_type: dict[str, Any]
+) -> None:
+    """Test float type uses condecimal when use_decimal_for_multiple_of is True."""
+    data_type_manager = DataTypeManager(use_decimal_for_multiple_of=True)
+    assert data_type_manager.get_data_float_type(types, **params) == data_type_manager.data_type(**data_type)
+
+
+@pytest.mark.parametrize(
+    ("types", "params", "data_type"),
+    [
+        (Types.string, {}, {"type": "str"}),
+        (
+            Types.string,
+            {"pattern": "^abc"},
+            {
+                "type": "constr",
+                "is_func": True,
+                "kwargs": {"regex": "r'^abc'"},
+                "import_": IMPORT_CONSTR,
+            },
+        ),
+        (
+            Types.string,
+            {"minLength": 10},
+            {
+                "type": "constr",
+                "is_func": True,
+                "kwargs": {"min_length": 10},
+                "import_": IMPORT_CONSTR,
+            },
+        ),
+        (
+            Types.string,
+            {"maxLength": 10},
+            {
+                "type": "constr",
+                "is_func": True,
+                "kwargs": {"max_length": 10},
+                "import_": IMPORT_CONSTR,
+            },
+        ),
+    ],
+)
+def test_get_data_str_type(types: Types, params: dict[str, Any], data_type: dict[str, Any]) -> None:
+    """Test string data type generation with various constraints."""
+    data_type_manager = DataTypeManager()
+    assert data_type_manager.get_data_str_type(types, **params) == data_type_manager.data_type(**data_type)
+
+
+@pytest.mark.parametrize(
+    ("types", "data_type"),
+    [
+        (Types.string, {"type": "str"}),
+        (Types.integer, {"type": "int"}),
+        (Types.float, {"type": "float"}),
+        (Types.boolean, {"type": "bool"}),
+        (
+            Types.decimal,
+            {"type": "Decimal", "import_": Import(from_="decimal", import_="Decimal")},
+        ),
+    ],
+)
+def test_get_data_type(types: Types, data_type: dict[str, str]) -> None:
+    """Test basic data type retrieval for common types."""
+    data_type_manager = DataTypeManager()
+    assert data_type_manager.get_data_type(types) == data_type_manager.data_type(**data_type)
+
+
+def test_data_type_type_hint() -> None:
+    """Test type hint generation for DataType objects."""
+    assert DataType(type="str").type_hint == "str"
+    assert DataType(type="constr", is_func=True).type_hint == "constr()"
+    assert DataType(type="constr", is_func=True, kwargs={"min_length": 10}).type_hint == "constr(min_length=10)"
+
+
+@pytest.mark.parametrize(
+    ("types", "data_type"),
+    [
+        ("string", {"type": "str"}),
+        (10, {"type": "int"}),
+        (20.3, {"type": "float"}),
+        (True, {"type": "bool"}),
+    ],
+)
+def test_get_data_type_from_value(types: Any, data_type: dict[str, str]) -> None:
+    """Test data type inference from Python values."""
+    data_type_manager = DataTypeManager()
+    assert data_type_manager.get_data_type_from_value(types) == data_type_manager.data_type(**data_type)
+
+
+@pytest.mark.parametrize(
+    ("types", "data_type"),
+    [
+        (
+            [1, 2, 3],
+            ("typing.List", False),
+        ),
+        (
+            {"a": 1, "b": 2, "c": 3},
+            ("typing.Dict", False),
+        ),
+        (None, ("typing.Any", False)),
+    ],
+)
+def test_get_data_type_from_full_path(types: Any, data_type: tuple[str, bool]) -> None:
+    """Test data type generation from full module paths."""
+    data_type_manager = DataTypeManager()
+    assert data_type_manager.get_data_type_from_value(types) == data_type_manager.get_data_type_from_full_path(
+        *data_type
+    )
diff -pruN 0.26.4-3/tests/model/pydantic_v2/__init__.py 0.45.0-1/tests/model/pydantic_v2/__init__.py
--- 0.26.4-3/tests/model/pydantic_v2/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/model/pydantic_v2/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1 @@
+"""Pydantic v2 model unit tests package."""
diff -pruN 0.26.4-3/tests/model/pydantic_v2/test_root_model.py 0.45.0-1/tests/model/pydantic_v2/test_root_model.py
--- 0.26.4-3/tests/model/pydantic_v2/test_root_model.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/model/pydantic_v2/test_root_model.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,69 @@
+"""Tests for Pydantic v2 RootModel generation."""
+
+from __future__ import annotations
+
+from datamodel_code_generator.model import DataModelFieldBase
+from datamodel_code_generator.model.pydantic_v2.root_model import RootModel
+from datamodel_code_generator.reference import Reference
+from datamodel_code_generator.types import DataType
+
+
+def test_root_model() -> None:
+    """Test RootModel generation with optional field."""
+    root_model = RootModel(
+        fields=[
+            DataModelFieldBase(
+                name="a",
+                data_type=DataType(type="str"),
+                default="abc",
+                required=False,
+            )
+        ],
+        reference=Reference(name="TestRootModel", path="test_root_model"),
+    )
+
+    assert root_model.name == "TestRootModel"
+    assert root_model.fields == [
+        DataModelFieldBase(
+            name="a",
+            data_type=DataType(type="str"),
+            default="abc",
+            required=False,
+            parent=root_model,
+        )
+    ]
+
+    assert root_model.base_class == "RootModel"
+    assert root_model.custom_base_class is None
+    assert root_model.render() == ("class TestRootModel(RootModel[Optional[str]]):\n    root: Optional[str] = 'abc'")
+
+
+def test_root_model_custom_base_class_is_ignored() -> None:
+    """Verify that passing a custom_base_class is ignored."""
+    root_model = RootModel(
+        custom_base_class="test.Test",
+        fields=[
+            DataModelFieldBase(
+                name="a",
+                data_type=DataType(type="str"),
+                default="abc",
+                required=False,
+            )
+        ],
+        reference=Reference(name="TestRootModel", path="test_root_model"),
+    )
+
+    assert root_model.name == "TestRootModel"
+    assert root_model.fields == [
+        DataModelFieldBase(
+            name="a",
+            data_type=DataType(type="str"),
+            default="abc",
+            required=False,
+            parent=root_model,
+        )
+    ]
+
+    assert root_model.base_class == "RootModel"
+    assert root_model.custom_base_class is None  # make sure it's ignored
+    assert root_model.render() == ("class TestRootModel(RootModel[Optional[str]]):\n    root: Optional[str] = 'abc'")
diff -pruN 0.26.4-3/tests/model/test_base.py 0.45.0-1/tests/model/test_base.py
--- 0.26.4-3/tests/model/test_base.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/model/test_base.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,340 @@
+"""Tests for base model classes and utilities."""
+
+from __future__ import annotations
+
+from pathlib import Path
+from tempfile import NamedTemporaryFile
+from typing import Any
+
+import pytest
+
+from datamodel_code_generator.model.base import (
+    DataModel,
+    DataModelFieldBase,
+    TemplateBase,
+    get_module_path,
+    sanitize_module_name,
+)
+from datamodel_code_generator.reference import Reference
+from datamodel_code_generator.types import DataType, Types
+
+
+class A(TemplateBase):
+    """Test helper class for TemplateBase testing."""
+
+    def __init__(self, path: Path) -> None:
+        """Initialize with template file path."""
+        self._path = path
+
+    @property
+    def template_file_path(self) -> Path:
+        """Return the template file path."""
+        return self._path
+
+    def render(self) -> str:  # noqa: PLR6301
+        """Render the template."""
+        return ""
+
+
+class B(DataModel):
+    """Test helper class for DataModel testing with template path."""
+
+    @classmethod
+    def get_data_type(cls, types: Types, **kwargs: Any) -> DataType:  # noqa: D102
+        pass
+
+    def __init__(self, *args: Any, **kwargs: Any) -> None:  # noqa: D107
+        super().__init__(*args, **kwargs)
+
+    TEMPLATE_FILE_PATH = ""
+
+
+class C(DataModel):
+    """Test helper class for DataModel testing without template path."""
+
+    @classmethod
+    def get_data_type(cls, types: Types, **kwargs: Any) -> DataType:  # noqa: D102
+        pass
+
+
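+# Minimal Jinja2 template used by test_data_model: decorators are rendered first,
+# then a @dataclass body where optional fields get their default appended.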
+template: str = """{%- for decorator in decorators -%}
+{{ decorator }}
+{%- endfor %}
+@dataclass
+class {{ class_name }}:
+{%- for field in fields -%}
+    {%- if field.required %}
+    {{ field.name }}: {{ field.type_hint }}
+    {%- else %}
+    {{ field.name }}: {{ field.type_hint }} = {{field.default}}
+    {%- endif %}
+{%- endfor -%}"""
+
+
+def test_template_base() -> None:
+    """Test TemplateBase rendering and file path handling."""
+    with NamedTemporaryFile("w", delete=False, encoding="utf-8") as dummy_template:
+        dummy_template.write("abc")
+        dummy_template.seek(0)
+        dummy_template.close()
+        a: TemplateBase = A(Path(dummy_template.name))
+    assert str(a.template_file_path) == dummy_template.name
+    assert a._render() == "abc"
+    assert not str(a)
+
+
+def test_data_model() -> None:
+    """Test DataModel rendering with fields and decorators."""
+    field = DataModelFieldBase(name="a", data_type=DataType(type="str"), default="abc", required=True)
+
+    with NamedTemporaryFile("w", delete=False, encoding="utf-8") as dummy_template:
+        dummy_template.write(template)
+        dummy_template.seek(0)
+        dummy_template.close()
+        B.TEMPLATE_FILE_PATH = dummy_template.name
+        data_model = B(
+            fields=[field],
+            decorators=["@validate"],
+            base_classes=[Reference(path="base", original_name="base", name="Base")],
+            reference=Reference(path="test_model", name="test_model"),
+        )
+
+    assert data_model.name == "test_model"
+    assert data_model.fields == [field]
+    assert data_model.decorators == ["@validate"]
+    assert data_model.base_class == "Base"
+    assert data_model.render() == "@validate\n@dataclass\nclass test_model:\n    a: str"
+
+
+def test_data_model_exception() -> None:
+    """Test DataModel raises exception when TEMPLATE_FILE_PATH is undefined."""
+    field = DataModelFieldBase(name="a", data_type=DataType(type="str"), default="abc", required=True)
+    with pytest.raises(Exception, match="TEMPLATE_FILE_PATH is undefined"):
+        C(
+            fields=[field],
+            reference=Reference(path="abc", original_name="abc", name="abc"),
+        )
+
+
+def test_data_field() -> None:
+    """Test DataModelFieldBase type hint generation for various configurations."""
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(is_list=True),
+        required=True,
+        is_list=True,
+        is_union=True,
+    )
+    assert field.type_hint == "List"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(is_list=True),
+        required=True,
+        is_list=True,
+        is_union=False,
+    )
+    assert field.type_hint == "List"
+    field = DataModelFieldBase(name="a", data_type=DataType(), required=False)
+    assert field.type_hint == "None"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(is_list=True),
+        required=False,
+        is_list=True,
+        is_union=True,
+    )
+    assert field.type_hint == "Optional[List]"
+    field = DataModelFieldBase(name="a", data_type=DataType(), required=False, is_list=False, is_union=True)
+    assert field.type_hint == "None"
+    field = DataModelFieldBase(name="a", data_type=DataType(), required=False, is_list=False, is_union=False)
+    assert field.type_hint == "None"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(is_list=True),
+        required=False,
+        is_list=True,
+        is_union=False,
+    )
+    assert field.type_hint == "Optional[List]"
+    field = DataModelFieldBase(name="a", data_type=DataType(type="str"), required=True)
+    assert field.type_hint == "str"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(type="str", is_list=True),
+        required=True,
+    )
+    assert field.type_hint == "List[str]"
+    field = DataModelFieldBase(name="a", data_type=DataType(type="str"), required=True)
+    assert field.type_hint == "str"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(type="str"),
+        required=True,
+    )
+    assert field.type_hint == "str"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(type="str", is_list=True),
+        required=True,
+    )
+    assert field.type_hint == "List[str]"
+    field = DataModelFieldBase(name="a", data_type=DataType(type="str"), required=False)
+    assert field.type_hint == "Optional[str]"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(
+            type="str",
+            is_list=True,
+        ),
+        required=False,
+    )
+    assert field.type_hint == "Optional[List[str]]"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(type="str"),
+        required=False,
+    )
+    assert field.type_hint == "Optional[str]"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(type="str"),
+        required=False,
+    )
+    assert field.type_hint == "Optional[str]"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(
+            type="str",
+            is_list=True,
+        ),
+        required=False,
+    )
+    assert field.type_hint == "Optional[List[str]]"
+
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(data_types=[DataType(type="str"), DataType(type="int")]),
+        required=True,
+    )
+    assert field.type_hint == "Union[str, int]"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(
+            data_types=[DataType(type="str"), DataType(type="int")],
+            is_list=True,
+        ),
+        required=True,
+    )
+    assert field.type_hint == "List[Union[str, int]]"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(data_types=[DataType(type="str"), DataType(type="int")]),
+        required=True,
+    )
+    assert field.type_hint == "Union[str, int]"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(data_types=[DataType(type="str"), DataType(type="int")]),
+        required=True,
+    )
+    assert field.type_hint == "Union[str, int]"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(data_types=[DataType(type="str"), DataType(type="int")], is_list=True),
+        required=True,
+    )
+    assert field.type_hint == "List[Union[str, int]]"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(data_types=[DataType(type="str"), DataType(type="int")]),
+        required=False,
+    )
+    assert field.type_hint == "Optional[Union[str, int]]"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(
+            data_types=[DataType(type="str"), DataType(type="int")],
+            is_list=True,
+        ),
+        required=False,
+    )
+    assert field.type_hint == "Optional[List[Union[str, int]]]"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(data_types=[DataType(type="str"), DataType(type="int")]),
+        required=False,
+    )
+    assert field.type_hint == "Optional[Union[str, int]]"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(data_types=[DataType(type="str"), DataType(type="int")]),
+        required=False,
+    )
+    assert field.type_hint == "Optional[Union[str, int]]"
+    field = DataModelFieldBase(
+        name="a",
+        data_type=DataType(data_types=[DataType(type="str"), DataType(type="int")], is_list=True),
+        required=False,
+    )
+    assert field.type_hint == "Optional[List[Union[str, int]]]"
+
+    field = DataModelFieldBase(name="a", data_type=DataType(is_list=True), required=False)
+    assert field.type_hint == "Optional[List]"
+
+
+@pytest.mark.parametrize(
+    ("name", "expected_true", "expected_false"),
+    [
+        ("array-commons.schema", "array_commons.schema", "array_commons_schema"),
+        ("123filename", "_123filename", "_123filename"),
+        ("normal_filename", "normal_filename", "normal_filename"),
+        ("file!name", "file_name", "file_name"),
+        ("", "", ""),
+    ],
+)
+@pytest.mark.parametrize("treat_dot_as_module", [True, False])
+def test_sanitize_module_name(name: str, expected_true: str, expected_false: str, treat_dot_as_module: bool) -> None:
+    """Test module name sanitization with different characters and options."""
+    expected = expected_true if treat_dot_as_module else expected_false
+    assert sanitize_module_name(name, treat_dot_as_module=treat_dot_as_module) == expected
+
+
+@pytest.mark.parametrize(
+    ("treat_dot_as_module", "expected"),
+    [
+        (True, ["inputs", "array_commons.schema", "array-commons"]),
+        (False, ["inputs", "array_commons_schema", "array-commons"]),
+    ],
+)
+def test_get_module_path_with_file_path(treat_dot_as_module: bool, expected: list[str]) -> None:
+    """Test module path generation with a file path."""
+    file_path = Path("inputs/array-commons.schema.json")
+    result = get_module_path("array-commons.schema", file_path, treat_dot_as_module=treat_dot_as_module)
+    assert result == expected
+
+
+@pytest.mark.parametrize("treat_dot_as_module", [True, False])
+def test_get_module_path_without_file_path(treat_dot_as_module: bool) -> None:
+    """Test module path generation without a file path."""
+    result = get_module_path("my_module.submodule", None, treat_dot_as_module=treat_dot_as_module)
+    expected = ["my_module"]
+    assert result == expected
+
+
+@pytest.mark.parametrize(
+    ("treat_dot_as_module", "name", "expected"),
+    [
+        (True, "a.b.c", ["a", "b"]),
+        (True, "simple", []),
+        (True, "with.dot", ["with"]),
+        (False, "a.b.c", ["a", "b"]),
+        (False, "simple", []),
+        (False, "with.dot", ["with"]),
+    ],
+)
+def test_get_module_path_without_file_path_parametrized(
+    treat_dot_as_module: bool, name: str, expected: list[str]
+) -> None:
+    """Test module path generation without file path for various module names."""
+    result = get_module_path(name, None, treat_dot_as_module=treat_dot_as_module)
+    assert result == expected
diff -pruN 0.26.4-3/tests/model/test_dataclass.py 0.45.0-1/tests/model/test_dataclass.py
--- 0.26.4-3/tests/model/test_dataclass.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/model/test_dataclass.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,33 @@
+"""Tests for dataclass model field generation."""
+
+from __future__ import annotations
+
+from datamodel_code_generator.model.dataclass import DataModelField
+from datamodel_code_generator.types import DataType
+
+
+def test_data_model_field_process_const() -> None:
+    """Test process_const method functionality."""
+    field = DataModelField(name="test_field", data_type=DataType(type="str"), required=True, extras={"const": "v1"})
+
+    field.process_const()
+
+    assert field.const is True
+    assert field.nullable is False
+    assert field.data_type.literals == ["v1"]
+    assert field.default == "v1"
+
+
+def test_data_model_field_process_const_no_const() -> None:
+    """Test process_const when no const is in extras."""
+    field = DataModelField(name="test_field", data_type=DataType(type="str"), required=True, extras={})
+
+    original_nullable = field.nullable
+    original_default = field.default
+    original_const = field.const
+
+    field.process_const()
+
+    assert field.const == original_const
+    assert field.nullable == original_nullable
+    assert field.default == original_default
diff -pruN 0.26.4-3/tests/model/test_typed_dict.py 0.45.0-1/tests/model/test_typed_dict.py
--- 0.26.4-3/tests/model/test_typed_dict.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/model/test_typed_dict.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,33 @@
+"""Tests for TypedDict model field generation."""
+
+from __future__ import annotations
+
+from datamodel_code_generator.model.typed_dict import DataModelField
+from datamodel_code_generator.types import DataType
+
+
+def test_data_model_field_process_const() -> None:
+    """Test process_const method functionality."""
+    field = DataModelField(name="test_field", data_type=DataType(type="str"), required=True, extras={"const": "v1"})
+
+    field.process_const()
+
+    assert field.const is True
+    assert field.nullable is False
+    assert field.data_type.literals == ["v1"]
+    assert field.default == "v1"
+
+
+def test_data_model_field_process_const_no_const() -> None:
+    """Test process_const when no const is in extras."""
+    field = DataModelField(name="test_field", data_type=DataType(type="str"), required=True, extras={})
+
+    original_nullable = field.nullable
+    original_default = field.default
+    original_const = field.const
+
+    field.process_const()
+
+    assert field.const == original_const
+    assert field.nullable == original_nullable
+    assert field.default == original_default
diff -pruN 0.26.4-3/tests/parser/__init__.py 0.45.0-1/tests/parser/__init__.py
--- 0.26.4-3/tests/parser/__init__.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/parser/__init__.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1 @@
+"""Parser unit tests package."""
diff -pruN 0.26.4-3/tests/parser/conftest.py 0.45.0-1/tests/parser/conftest.py
--- 0.26.4-3/tests/parser/conftest.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/parser/conftest.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,7 @@
+"""Shared fixtures for parser tests."""
+
+from __future__ import annotations
+
+from tests.main.conftest import auto_freeze_time, output_file
+
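+# Re-exporting via __all__ marks the imported fixtures as intentional so linters
+# do not flag them as unused imports.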
+__all__ = ["auto_freeze_time", "output_file"]
diff -pruN 0.26.4-3/tests/parser/test_base.py 0.45.0-1/tests/parser/test_base.py
--- 0.26.4-3/tests/parser/test_base.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/parser/test_base.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,662 @@
+"""Tests for base parser classes and utilities."""
+
+from __future__ import annotations
+
+from collections import OrderedDict
+from typing import Any
+from unittest.mock import MagicMock
+
+import pytest
+
+from datamodel_code_generator.model import DataModel, DataModelFieldBase
+from datamodel_code_generator.model.pydantic import BaseModel, DataModelField
+from datamodel_code_generator.model.type_alias import TypeAlias, TypeAliasBackport, TypeAliasTypeBackport, TypeStatement
+from datamodel_code_generator.parser.base import (
+    Parser,
+    add_model_path_to_list,
+    escape_characters,
+    exact_import,
+    get_module_directory,
+    relative,
+    sort_data_models,
+    to_hashable,
+)
+from datamodel_code_generator.reference import Reference, snake_to_upper_camel
+from datamodel_code_generator.types import DataType
+
+
+class A(DataModel):
+    """Test data model class A."""
+
+
+class B(DataModel):
+    """Test data model class B."""
+
+
+class C(Parser):
+    """Test parser class C."""
+
+    def parse_raw(self, name: str, raw: dict[str, Any]) -> None:
+        """Parse raw data into models."""
+
+    def parse(self) -> str:  # noqa: PLR6301
+        """Parse and return results."""
+        return "parsed"
+
+
+def test_parser() -> None:
+    """Test parser initialization."""
+    c = C(
+        data_model_type=D,
+        data_model_root_type=B,
+        data_model_field_type=DataModelFieldBase,
+        base_class="Base",
+        source="",
+    )
+    assert c.data_model_type == D
+    assert c.data_model_root_type == B
+    assert c.data_model_field_type == DataModelFieldBase
+    assert c.base_class == "Base"
+
+
+def test_add_model_path_to_list() -> None:
+    """Test method which adds model paths to "update" list."""
+    reference_1 = Reference(path="Base1", original_name="A", name="A")
+    reference_2 = Reference(path="Alias2", original_name="B", name="B")
+    reference_3 = Reference(path="Alias3", original_name="B", name="B")
+    reference_4 = Reference(path="Alias4", original_name="B", name="B")
+    reference_5 = Reference(path="Alias5", original_name="B", name="B")
+    model1 = BaseModel(fields=[], reference=reference_1)
+    model2 = TypeAlias(fields=[], reference=reference_2)
+    model3 = TypeAliasBackport(fields=[], reference=reference_3)
+    model4 = TypeAliasTypeBackport(fields=[], reference=reference_4)
+    model5 = TypeStatement(fields=[], reference=reference_5)
+
+    paths = add_model_path_to_list(None, model1)
+    assert "Base1" in paths
+    assert len(paths) == 1
+
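+    # list[str]() instantiates the builtin generic directly (PEP 585, Python 3.9+).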
+    paths = list[str]()
+    add_model_path_to_list(paths, model1)
+    assert "Base1" in paths
+    assert len(paths) == 1
+
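+    # Adding the same model a second time must not duplicate its path.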
+    add_model_path_to_list(paths, model1)
+    assert len(paths) == 1
+
+    add_model_path_to_list(paths, model2)
+    assert "Alias2" not in paths
+
+    add_model_path_to_list(paths, model3)
+    assert "Alias3" not in paths
+
+    add_model_path_to_list(paths, model4)
+    assert "Alias4" not in paths
+
+    add_model_path_to_list(paths, model5)
+    assert "Alias5" not in paths
+
+
+def test_sort_data_models() -> None:
+    """Test sorting data models by dependencies."""
+    reference_a = Reference(path="A", original_name="A", name="A")
+    reference_b = Reference(path="B", original_name="B", name="B")
+    reference_c = Reference(path="C", original_name="C", name="C")
+    data_type_a = DataType(reference=reference_a)
+    data_type_b = DataType(reference=reference_b)
+    data_type_c = DataType(reference=reference_c)
+    reference = [
+        BaseModel(
+            fields=[
+                DataModelField(data_type=data_type_a),
+                DataModelFieldBase(data_type=data_type_c),
+            ],
+            reference=reference_a,
+        ),
+        BaseModel(
+            fields=[DataModelField(data_type=data_type_b)],
+            reference=reference_b,
+        ),
+        BaseModel(
+            fields=[DataModelField(data_type=data_type_b)],
+            reference=reference_c,
+        ),
+    ]
+
+    unresolved, resolved, require_update_action_models = sort_data_models(reference)
+    expected = OrderedDict()
+    expected["B"] = reference[1]
+    expected["C"] = reference[2]
+    expected["A"] = reference[0]
+
+    assert resolved == expected
+    assert unresolved == []
+    assert require_update_action_models == ["B", "A"]
+
+
+def test_sort_data_models_unresolved() -> None:
+    """Test sorting data models with unresolved references."""
+    reference_a = Reference(path="A", original_name="A", name="A")
+    reference_b = Reference(path="B", original_name="B", name="B")
+    reference_c = Reference(path="C", original_name="C", name="C")
+    reference_d = Reference(path="D", original_name="D", name="D")
+    reference_v = Reference(path="V", original_name="V", name="V")
+    reference_z = Reference(path="Z", original_name="Z", name="Z")
+    data_type_a = DataType(reference=reference_a)
+    data_type_b = DataType(reference=reference_b)
+    data_type_c = DataType(reference=reference_c)
+    data_type_v = DataType(reference=reference_v)
+    data_type_z = DataType(reference=reference_z)
+    reference = [
+        BaseModel(
+            fields=[
+                DataModelField(data_type=data_type_a),
+                DataModelFieldBase(data_type=data_type_c),
+            ],
+            reference=reference_a,
+        ),
+        BaseModel(
+            fields=[DataModelField(data_type=data_type_b)],
+            reference=reference_b,
+        ),
+        BaseModel(
+            fields=[DataModelField(data_type=data_type_b)],
+            reference=reference_c,
+        ),
+        BaseModel(
+            fields=[
+                DataModelField(data_type=data_type_a),
+                DataModelField(data_type=data_type_c),
+                DataModelField(data_type=data_type_z),
+            ],
+            reference=reference_d,
+        ),
+        BaseModel(
+            fields=[DataModelField(data_type=data_type_v)],
+            reference=reference_z,
+        ),
+    ]
+
+    with pytest.raises(Exception):  # noqa: B017, PT011
+        sort_data_models(reference)
+
+
+def test_sort_data_models_unresolved_raise_recursion_error() -> None:
+    """Test sorting data models raises error on recursion limit."""
+    reference_a = Reference(path="A", original_name="A", name="A")
+    reference_b = Reference(path="B", original_name="B", name="B")
+    reference_c = Reference(path="C", original_name="C", name="C")
+    reference_d = Reference(path="D", original_name="D", name="D")
+    reference_v = Reference(path="V", original_name="V", name="V")
+    reference_z = Reference(path="Z", original_name="Z", name="Z")
+    data_type_a = DataType(reference=reference_a)
+    data_type_b = DataType(reference=reference_b)
+    data_type_c = DataType(reference=reference_c)
+    data_type_v = DataType(reference=reference_v)
+    data_type_z = DataType(reference=reference_z)
+    reference = [
+        BaseModel(
+            fields=[
+                DataModelField(data_type=data_type_a),
+                DataModelFieldBase(data_type=data_type_c),
+            ],
+            reference=reference_a,
+        ),
+        BaseModel(
+            fields=[DataModelField(data_type=data_type_b)],
+            reference=reference_b,
+        ),
+        BaseModel(
+            fields=[DataModelField(data_type=data_type_b)],
+            reference=reference_c,
+        ),
+        BaseModel(
+            fields=[
+                DataModelField(data_type=data_type_a),
+                DataModelField(data_type=data_type_c),
+                DataModelField(data_type=data_type_z),
+            ],
+            reference=reference_d,
+        ),
+        BaseModel(
+            fields=[DataModelField(data_type=data_type_v)],
+            reference=reference_z,
+        ),
+    ]
+
+    with pytest.raises(Exception):  # noqa: B017, PT011
+        sort_data_models(reference, recursion_count=100000)
+
+
+@pytest.mark.parametrize(
+    ("current_module", "reference", "val"),
+    [
+        ("", "Foo", ("", "")),
+        ("a", "a.Foo", ("", "")),
+        ("a", "a.b.Foo", (".", "b")),
+        ("a.b", "a.Foo", (".", "Foo")),
+        ("a.b.c", "a.Foo", ("..", "Foo")),
+        ("a.b.c", "Foo", ("...", "Foo")),
+    ],
+)
+def test_relative(current_module: str, reference: str, val: tuple[str, str]) -> None:
+    """Test relative import calculation."""
+    assert relative(current_module, reference) == val
+
+
+@pytest.mark.parametrize(
+    ("from_", "import_", "name", "val"),
+    [
+        (".", "mod", "Foo", (".mod", "Foo")),
+        ("..", "mod", "Foo", ("..mod", "Foo")),
+        (".a", "mod", "Foo", (".a.mod", "Foo")),
+        ("..a", "mod", "Foo", ("..a.mod", "Foo")),
+        ("..a.b", "mod", "Foo", ("..a.b.mod", "Foo")),
+    ],
+)
+def test_exact_import(from_: str, import_: str, name: str, val: tuple[str, str]) -> None:
+    """Test exact import formatting."""
+    assert exact_import(from_, import_, name) == val
+
+
+@pytest.mark.parametrize(
+    ("word", "expected"),
+    [
+        (
+            "_hello",
+            "_Hello",
+        ),  # If a name starts with an underscore, the underscore should be kept.
+        ("hello_again", "HelloAgain"),  # regular snake case
+        ("hello__again", "HelloAgain"),  # handles double underscores
+        (
+            "hello___again_again",
+            "HelloAgainAgain",
+        ),  # handles double and single underscores
+        ("hello_again_", "HelloAgain"),  # handles trailing underscores
+        ("hello", "Hello"),  # no underscores
+        ("____", "_"),  # degenerate case, but this is the current expected behavior
+    ],
+)
+def test_snake_to_upper_camel(word: str, expected: str) -> None:
+    """Tests the snake to upper camel function."""
+    actual = snake_to_upper_camel(word)
+    assert actual == expected
+
+
+class D(DataModel):
+    """Test data model class D with custom render."""
+
+    def __init__(self, filename: str, data: str, fields: list[DataModelFieldBase]) -> None:  # noqa: ARG002
+        """Initialize data model with custom data."""
+        super().__init__(fields=fields, reference=Reference(""))
+        self._data = data
+
+    def render(self) -> str:
+        """Render the data model."""
+        return self._data
+
+
+@pytest.fixture
+def parser_fixture() -> C:
+    """Create a test parser instance for unit tests."""
+    return C(
+        data_model_type=D,
+        data_model_root_type=B,
+        data_model_field_type=DataModelFieldBase,
+        base_class="Base",
+        source="",
+    )
+
+
+def test_additional_imports() -> None:
+    """Test that additional imports are inside imports container."""
+    new_parser = C(
+        source="",
+        additional_imports=["collections.deque"],
+    )
+    assert len(new_parser.imports) == 1
+    assert new_parser.imports["collections"] == {"deque"}
+
+
+def test_no_additional_imports() -> None:
+    """Test that not additional imports are not affecting imports container."""
+    new_parser = C(
+        source="",
+    )
+    assert len(new_parser.imports) == 0
+
+
+@pytest.mark.parametrize(
+    ("input_data", "expected"),
+    [
+        (
+            {
+                ("folder1", "module1.py"): "content1",
+                ("folder1", "module2.py"): "content2",
+                ("folder1", "__init__.py"): "init_content",
+            },
+            {
+                ("folder1", "module1.py"): "content1",
+                ("folder1", "module2.py"): "content2",
+                ("folder1", "__init__.py"): "init_content",
+            },
+        ),
+        (
+            {
+                ("folder1.module", "file.py"): "content1",
+                ("folder1.module", "__init__.py"): "init_content",
+            },
+            {
+                ("folder1", "module", "file.py"): "content1",
+                ("folder1", "__init__.py"): "init_content",
+                ("folder1", "module", "__init__.py"): "init_content",
+            },
+        ),
+    ],
+)
+def test_postprocess_result_modules(input_data: Any, expected: Any) -> None:
+    """Test postprocessing of result modules."""
+    result = Parser._Parser__postprocess_result_modules(input_data)
+    assert result == expected
+
+
+def test_find_member_with_integer_enum() -> None:
+    """Test find_member method with integer enum values."""
+    from datamodel_code_generator.model.enum import Enum
+    from datamodel_code_generator.model.pydantic.base_model import DataModelField
+    from datamodel_code_generator.reference import Reference
+    from datamodel_code_generator.types import DataType
+
+    # Create test Enum with integer values
+    enum = Enum(
+        reference=Reference(path="test_path", original_name="TestEnum", name="TestEnum"),
+        fields=[
+            DataModelField(
+                name="VALUE_1000",
+                default="1000",
+                data_type=DataType(type="int"),
+                required=True,
+            ),
+            DataModelField(
+                name="VALUE_100",
+                default="100",
+                data_type=DataType(type="int"),
+                required=True,
+            ),
+            DataModelField(
+                name="VALUE_0",
+                default="0",
+                data_type=DataType(type="int"),
+                required=True,
+            ),
+        ],
+    )
+
+    # Test finding members with integer values
+    assert enum.find_member(1000).field.name == "VALUE_1000"
+    assert enum.find_member(100).field.name == "VALUE_100"
+    assert enum.find_member(0).field.name == "VALUE_0"
+
+    # Test with string representations
+    assert enum.find_member("1000").field.name == "VALUE_1000"
+    assert enum.find_member("100").field.name == "VALUE_100"
+    assert enum.find_member("0").field.name == "VALUE_0"
+
+    # Test with non-existent values
+    assert enum.find_member(999) is None
+    assert enum.find_member("999") is None
+
+
+def test_find_member_with_string_enum() -> None:
+    """Test find_member method with string enum values."""
+    from datamodel_code_generator.model.enum import Enum
+    from datamodel_code_generator.model.pydantic.base_model import DataModelField
+    from datamodel_code_generator.reference import Reference
+    from datamodel_code_generator.types import DataType
+
+    enum = Enum(
+        reference=Reference(path="test_path", original_name="TestEnum", name="TestEnum"),
+        fields=[
+            DataModelField(
+                name="VALUE_A",
+                default="'value_a'",
+                data_type=DataType(type="str"),
+                required=True,
+            ),
+            DataModelField(
+                name="VALUE_B",
+                default="'value_b'",
+                data_type=DataType(type="str"),
+                required=True,
+            ),
+        ],
+    )
+
+    member = enum.find_member("value_a")
+    assert member is not None
+    assert member.field.name == "VALUE_A"
+
+    member = enum.find_member("value_b")
+    assert member is not None
+    assert member.field.name == "VALUE_B"
+
+    member = enum.find_member("'value_a'")
+    assert member is not None
+    assert member.field.name == "VALUE_A"
+
+
+def test_find_member_with_mixed_enum() -> None:
+    """Test find_member method with mixed type enum values."""
+    from datamodel_code_generator.model.enum import Enum
+    from datamodel_code_generator.model.pydantic.base_model import DataModelField
+    from datamodel_code_generator.reference import Reference
+    from datamodel_code_generator.types import DataType
+
+    enum = Enum(
+        reference=Reference(path="test_path", original_name="TestEnum", name="TestEnum"),
+        fields=[
+            DataModelField(
+                name="INT_VALUE",
+                default="100",
+                data_type=DataType(type="int"),
+                required=True,
+            ),
+            DataModelField(
+                name="STR_VALUE",
+                default="'value_a'",
+                data_type=DataType(type="str"),
+                required=True,
+            ),
+        ],
+    )
+
+    member = enum.find_member(100)
+    assert member is not None
+    assert member.field.name == "INT_VALUE"
+
+    member = enum.find_member("100")
+    assert member is not None
+    assert member.field.name == "INT_VALUE"
+
+    member = enum.find_member("value_a")
+    assert member is not None
+    assert member.field.name == "STR_VALUE"
+
+    member = enum.find_member("'value_a'")
+    assert member is not None
+    assert member.field.name == "STR_VALUE"
+
+
+@pytest.fixture
+def escape_map() -> dict[str, str]:
+    """Provide escape character mapping for tests."""
+    return {
+        "\u0000": r"\x00",  # Null byte
+        "'": r"\'",
+        "\b": r"\b",
+        "\f": r"\f",
+        "\n": r"\n",
+        "\r": r"\r",
+        "\t": r"\t",
+        "\\": r"\\",
+    }
+
+
+@pytest.mark.parametrize(
+    ("input_str", "expected"),
+    [
+        ("\u0000", r"\x00"),  # Test null byte
+        ("'", r"\'"),  # Test single quote
+        ("\b", r"\b"),  # Test backspace
+        ("\f", r"\f"),  # Test form feed
+        ("\n", r"\n"),  # Test newline
+        ("\r", r"\r"),  # Test carriage return
+        ("\t", r"\t"),  # Test tab
+        ("\\", r"\\"),  # Test backslash
+    ],
+)
+def test_character_escaping(input_str: str, expected: str) -> None:
+    """Test character escaping in strings."""
+    assert input_str.translate(escape_characters) == expected
+
+
+@pytest.mark.parametrize("flag", [True, False])
+def test_use_non_positive_negative_number_constrained_types(flag: bool) -> None:
+    """Test configuration of non-positive negative number constrained types."""
+    instance = C(source="", use_non_positive_negative_number_constrained_types=flag)
+
+    assert instance.data_type_manager.use_non_positive_negative_number_constrained_types == flag
+
+
+def test_to_hashable_simple_values() -> None:
+    """Test to_hashable with simple values."""
+    assert to_hashable("string") == "string"
+    assert to_hashable(123) == 123
+    assert to_hashable(None) == ""  # noqa: PLC1901
+
+
+def test_to_hashable_list_and_tuple() -> None:
+    """Test to_hashable with list and tuple."""
+    result = to_hashable([3, 1, 2])
+    assert isinstance(result, tuple)
+    assert result == (1, 2, 3)  # sorted
+
+    result = to_hashable((3, 1, 2))
+    assert isinstance(result, tuple)
+    assert result == (1, 2, 3)  # sorted
+
+
+def test_to_hashable_dict() -> None:
+    """Test to_hashable with dict."""
+    result = to_hashable({"b": 2, "a": 1})
+    assert isinstance(result, tuple)
+    # sorted by key
+    assert result == (("a", 1), ("b", 2))
+
+
+def test_to_hashable_mixed_types_fallback() -> None:
+    """Test to_hashable with mixed types that cannot be compared."""
+    mixed_list = [complex(1, 2), complex(3, 4)]
+    result = to_hashable(mixed_list)
+    assert isinstance(result, tuple)
+    # Should preserve order since sorting fails
+    assert result == (complex(1, 2), complex(3, 4))
+
+
+def test_to_hashable_nested_structures() -> None:
+    """Test to_hashable with nested structures."""
+    nested = {"outer": [{"inner": 1}]}
+    result = to_hashable(nested)
+    assert isinstance(result, tuple)
+
+
+def test_postprocess_result_modules_single_element_tuple() -> None:
+    """Test postprocessing with single element tuple (len < 2)."""
+    input_data = {
+        ("__init__.py",): "init_content",
+    }
+    result = Parser._Parser__postprocess_result_modules(input_data)
+    # Single element tuple should remain unchanged
+    assert ("__init__.py",) in result
+
+
+def test_postprocess_result_modules_single_file_no_dot() -> None:
+    """Test postprocessing with single file without dot in name."""
+    input_data = {
+        ("module.py",): "content",
+        ("__init__.py",): "init_content",
+    }
+    result = Parser._Parser__postprocess_result_modules(input_data)
+    assert ("module.py",) in result
+
+
+def test_postprocess_result_modules_single_element_no_dot() -> None:
+    """Test postprocessing with single element without dot (len(r) < 2 branch)."""
+    input_data = {
+        ("__init__.py",): "init_content",
+        ("file",): "content",  # Single element without dot, so len(r) = 1
+    }
+    result = Parser._Parser__postprocess_result_modules(input_data)
+    assert ("file",) in result
+
+
+@pytest.mark.parametrize(
+    ("module", "expected"),
+    [
+        ((), ()),  # empty
+        (("pkg",), ("pkg",)),  # single
+        (("pkg", "issuing"), ("pkg",)),  # submodule
+        (("foo", "bar", "baz"), ("foo", "bar")),  # deeply nested
+    ],
+    ids=["empty", "single", "submodule", "deeply_nested"],
+)
+def test_get_module_directory(module: tuple[str, ...], expected: tuple[str, ...]) -> None:
+    """Test get_module_directory with various inputs."""
+    assert get_module_directory(module) == expected
+
+
+@pytest.mark.parametrize(
+    ("scc_modules", "existing_modules", "expected"),
+    [
+        # name conflict: _internal already exists
+        ({(), ("sub",)}, {("_internal",)}, ("_internal_1",)),
+        # multiple conflicts: _internal and _internal_1 exist
+        ({(), ("sub",)}, {("_internal",), ("_internal_1",)}, ("_internal_2",)),
+        # different prefix break: LCP computation hits break
+        ({("common", "a"), ("common", "b"), ("other", "x")}, set(), ("_internal",)),
+    ],
+    ids=["name_conflict", "multiple_conflicts", "different_prefix_break"],
+)
+def test_compute_internal_module_path(
+    parser_fixture: C,
+    scc_modules: set[tuple[str, ...]],
+    existing_modules: set[tuple[str, ...]],
+    expected: tuple[str, ...],
+) -> None:
+    """Test __compute_internal_module_path with various conflict scenarios."""
+    result = parser_fixture._Parser__compute_internal_module_path(scc_modules, existing_modules)
+    assert result == expected
+
+
+def test_build_module_dependency_graph_with_missing_ref(parser_fixture: C) -> None:
+    """Test __build_module_dependency_graph when reference path is not in path_to_module."""
+    ref_source = MagicMock()
+    ref_source.source = True
+    ref_source.path = "nonexistent.Model"
+
+    data_type = MagicMock()
+    data_type.reference = ref_source
+
+    model1 = MagicMock()
+    model1.path = "pkg.Model1"
+    model1.all_data_types = [data_type]
+    model1.base_classes = []
+
+    module_models_list = [
+        (("pkg",), [model1]),
+    ]
+
+    graph = parser_fixture._Parser__build_module_dependency_graph(module_models_list)
+
+    assert graph == {("pkg",): set()}
diff -pruN 0.26.4-3/tests/parser/test_graphql.py 0.45.0-1/tests/parser/test_graphql.py
--- 0.26.4-3/tests/parser/test_graphql.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/parser/test_graphql.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,92 @@
+"""Tests for GraphQL schema parser."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any
+
+import pytest
+
+from datamodel_code_generator.model.dataclass import DataClass
+from datamodel_code_generator.parser.graphql import GraphQLParser
+from datamodel_code_generator.reference import Reference
+from tests.conftest import create_assert_file_content
+from tests.main.conftest import GRAPHQL_DATA_PATH, run_main_and_assert
+from tests.main.test_main_general import DATA_PATH
+
+if TYPE_CHECKING:
+    from pathlib import Path
+
+EXPECTED_GRAPHQL_PATH: Path = DATA_PATH / "expected" / "parser" / "graphql"
+
+assert_file_content = create_assert_file_content(EXPECTED_GRAPHQL_PATH)
+
+
+def test_graphql_field_enum(output_file: Path) -> None:
+    """Test parsing GraphQL field with enum default value."""
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "field-default-enum.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        expected_file="field-default-enum.py",
+        extra_args=["--set-default-enum-member"],
+    )
+
+
+def test_graphql_union_aliased_bug(output_file: Path) -> None:
+    """Test parsing GraphQL union with aliased types."""
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "union-aliased-bug.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        expected_file="union-aliased-bug.py",
+    )
+
+
+def test_graphql_union_commented(output_file: Path) -> None:
+    """Test parsing GraphQL union with comments."""
+    run_main_and_assert(
+        input_path=GRAPHQL_DATA_PATH / "union-commented.graphql",
+        output_path=output_file,
+        input_file_type="graphql",
+        assert_func=assert_file_content,
+        expected_file="union-commented.py",
+    )
+
+
+@pytest.mark.parametrize(
+    ("frozen_dataclasses", "keyword_only", "parser_dataclass_args", "kwargs_dataclass_args", "expected"),
+    [
+        (False, False, None, None, {}),
+        (True, False, None, None, {"frozen": True}),
+        (False, True, None, None, {"kw_only": True}),
+        (True, True, None, None, {"frozen": True, "kw_only": True}),
+        (False, False, {"slots": True}, None, {"slots": True}),
+        (True, True, {"slots": True}, None, {"slots": True}),
+        (True, True, {"slots": True}, {"order": True}, {"order": True}),
+    ],
+)
+def test_create_data_model_dataclass_arguments(
+    frozen_dataclasses: bool,
+    keyword_only: bool,
+    parser_dataclass_args: dict | None,
+    kwargs_dataclass_args: dict | None,
+    expected: dict,
+) -> None:
+    """Test _create_data_model handles dataclass_arguments correctly."""
+    parser = GraphQLParser(
+        source="type Query { id: ID }",
+        data_model_type=DataClass,
+        frozen_dataclasses=frozen_dataclasses,
+        keyword_only=keyword_only,
+    )
+    parser.dataclass_arguments = parser_dataclass_args
+
+    reference = Reference(path="test", original_name="Test", name="Test")
+    kwargs: dict[str, Any] = {"reference": reference, "fields": []}
+    if kwargs_dataclass_args is not None:
+        kwargs["dataclass_arguments"] = kwargs_dataclass_args
+    result = parser._create_data_model(**kwargs)
+    assert isinstance(result, DataClass)
+    assert result.dataclass_arguments == expected
diff -pruN 0.26.4-3/tests/parser/test_jsonschema.py 0.45.0-1/tests/parser/test_jsonschema.py
--- 0.26.4-3/tests/parser/test_jsonschema.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/parser/test_jsonschema.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,1111 @@
+"""Tests for JSON Schema parser."""
+
+from __future__ import annotations
+
+import json
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, Optional, Union
+from unittest.mock import call
+
+import pydantic
+import pytest
+import yaml
+
+from datamodel_code_generator import AllOfMergeMode
+from datamodel_code_generator.imports import Import
+from datamodel_code_generator.model import DataModelFieldBase
+from datamodel_code_generator.model.dataclass import DataClass
+from datamodel_code_generator.model.pydantic.base_model import BaseModel
+from datamodel_code_generator.parser.base import Parser, dump_templates
+from datamodel_code_generator.parser.jsonschema import (
+    JsonSchemaObject,
+    JsonSchemaParser,
+    Types,
+    get_model_by_path,
+)
+from datamodel_code_generator.reference import SPECIAL_PATH_MARKER, Reference
+from datamodel_code_generator.types import DataType
+from tests.conftest import assert_output
+
+if TYPE_CHECKING:
+    from pytest_mock import MockerFixture
+
+DATA_PATH: Path = Path(__file__).parents[1] / "data" / "jsonschema"
+
+EXPECTED_JSONSCHEMA_PATH = Path(__file__).parents[1] / "data" / "expected" / "parser" / "jsonschema"
+
+
+@pytest.mark.parametrize(
+    ("schema", "path", "model"),
+    [
+        ({"foo": "bar"}, None, {"foo": "bar"}),
+        ({"a": {"foo": "bar"}}, "a", {"foo": "bar"}),
+        ({"a": {"b": {"foo": "bar"}}}, "a/b", {"foo": "bar"}),
+        ({"a": {"b": {"c": {"foo": "bar"}}}}, "a/b", {"c": {"foo": "bar"}}),
+        ({"a": {"b": {"c": {"foo": "bar"}}}}, "a/b/c", {"foo": "bar"}),
+    ],
+)
+def test_get_model_by_path(schema: dict, path: str, model: dict) -> None:
+    """Test model retrieval by path."""
+    assert get_model_by_path(schema, path.split("/") if path else []) == model
+
+
+def test_json_schema_object_ref_url_json(mocker: MockerFixture) -> None:
+    """Test JSON schema object reference with JSON URL."""
+    parser = JsonSchemaParser("")
+    obj = JsonSchemaObject.parse_obj({"$ref": "https://example.com/person.schema.json#/definitions/User"})
+    mock_get = mocker.patch("httpx.get")
+    mock_get.return_value.text = json.dumps(
+        {
+            "$id": "https://example.com/person.schema.json",
+            "$schema": "http://json-schema.org/draft-07/schema#",
+            "definitions": {
+                "User": {
+                    "type": "object",
+                    "properties": {
+                        "name": {
+                            "type": "string",
+                        }
+                    },
+                }
+            },
+        },
+    )
+
+    parser.parse_ref(obj, ["Model"])
+    assert (
+        dump_templates(list(parser.results))
+        == """class User(BaseModel):
+    name: Optional[str] = None"""
+    )
+    parser.parse_ref(obj, ["Model"])
+    mock_get.assert_has_calls([
+        call(
+            "https://example.com/person.schema.json",
+            headers=None,
+            verify=True,
+            follow_redirects=True,
+            params=None,
+        ),
+    ])
+
+
+def test_json_schema_object_ref_url_yaml(mocker: MockerFixture) -> None:
+    """Test JSON schema object reference with YAML URL."""
+    parser = JsonSchemaParser("")
+    obj = JsonSchemaObject.parse_obj({"$ref": "https://example.org/schema.yaml#/definitions/User"})
+    mock_get = mocker.patch("httpx.get")
+    mock_get.return_value.text = yaml.safe_dump(json.load((DATA_PATH / "user.json").open()))
+
+    parser.parse_ref(obj, ["User"])
+    assert (
+        dump_templates(list(parser.results))
+        == """class User(BaseModel):
+    name: Optional[str] = Field(None, example='ken')
+    pets: List[User] = Field(default_factory=list)
+
+
+class Pet(BaseModel):
+    name: Optional[str] = Field(None, examples=['dog', 'cat'])"""
+    )
+    parser.parse_ref(obj, [])
+    mock_get.assert_called_once_with(
+        "https://example.org/schema.yaml",
+        headers=None,
+        verify=True,
+        follow_redirects=True,
+        params=None,
+    )
+
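+# Note: parse_ref is invoked twice above while httpx.get is asserted to have
+# been called exactly once -- remote documents are cached after the first
+# fetch, so repeated "$ref"s to the same URL do not re-download it.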
+
+def test_json_schema_object_cached_ref_url_yaml(mocker: MockerFixture) -> None:
+    """Test JSON schema object cached reference with YAML URL."""
+    parser = JsonSchemaParser("")
+
+    obj = JsonSchemaObject.parse_obj({
+        "type": "object",
+        "properties": {
+            "pet": {"$ref": "https://example.org/schema.yaml#/definitions/Pet"},
+            "user": {"$ref": "https://example.org/schema.yaml#/definitions/User"},
+        },
+    })
+    mock_get = mocker.patch("httpx.get")
+    mock_get.return_value.text = yaml.safe_dump(json.load((DATA_PATH / "user.json").open()))
+
+    parser.parse_ref(obj, [])
+    assert (
+        dump_templates(list(parser.results))
+        == """class Pet(BaseModel):
+    name: Optional[str] = Field(None, examples=['dog', 'cat'])
+
+
+class User(BaseModel):
+    name: Optional[str] = Field(None, example='ken')
+    pets: List[User] = Field(default_factory=list)"""
+    )
+    mock_get.assert_called_once_with(
+        "https://example.org/schema.yaml",
+        headers=None,
+        verify=True,
+        follow_redirects=True,
+        params=None,
+    )
+
+
+def test_json_schema_ref_url_json(mocker: MockerFixture) -> None:
+    """Test JSON schema reference with JSON URL."""
+    parser = JsonSchemaParser("")
+    obj = {
+        "type": "object",
+        "properties": {"user": {"$ref": "https://example.org/schema.json#/definitions/User"}},
+    }
+    mock_get = mocker.patch("httpx.get")
+    mock_get.return_value.text = json.dumps(json.load((DATA_PATH / "user.json").open()))
+
+    parser.parse_raw_obj("Model", obj, ["Model"])
+    assert (
+        dump_templates(list(parser.results))
+        == """class Model(BaseModel):
+    user: Optional[User] = None
+
+
+class User(BaseModel):
+    name: Optional[str] = Field(None, example='ken')
+    pets: List[User] = Field(default_factory=list)
+
+
+class Pet(BaseModel):
+    name: Optional[str] = Field(None, examples=['dog', 'cat'])"""
+    )
+    mock_get.assert_called_once_with(
+        "https://example.org/schema.json",
+        headers=None,
+        verify=True,
+        follow_redirects=True,
+        params=None,
+    )
+
+
+@pytest.mark.parametrize(
+    ("source_obj", "generated_classes"),
+    [
+        (
+            {
+                "$id": "https://example.com/person.schema.json",
+                "$schema": "http://json-schema.org/draft-07/schema#",
+                "title": "Person",
+                "type": "object",
+                "properties": {
+                    "firstName": {
+                        "type": "string",
+                        "description": "The person's first name.",
+                    },
+                    "lastName": {
+                        "type": "string",
+                        "description": "The person's last name.",
+                    },
+                    "age": {
+                        "description": "Age in years which must be equal to or greater than zero.",
+                        "type": "integer",
+                        "minimum": 0,
+                    },
+                },
+            },
+            """class Person(BaseModel):
+    firstName: Optional[str] = None
+    lastName: Optional[str] = None
+    age: Optional[conint(ge=0)] = None""",
+        ),
+        (
+            {
+                "$id": "https://example.com/person.schema.json",
+                "$schema": "http://json-schema.org/draft-07/schema#",
+                "title": "person-object",
+                "type": "object",
+                "properties": {
+                    "name": {
+                        "type": "string",
+                        "description": "The person's name.",
+                    },
+                    "home-address": {
+                        "$ref": "#/definitions/home-address",
+                        "description": "The person's home address.",
+                    },
+                },
+                "definitions": {
+                    "home-address": {
+                        "type": "object",
+                        "properties": {
+                            "street-address": {"type": "string"},
+                            "city": {"type": "string"},
+                            "state": {"type": "string"},
+                        },
+                        "required": ["street_address", "city", "state"],
+                    }
+                },
+            },
+            """class Person(BaseModel):
+    name: Optional[str] = None
+    home_address: Optional[HomeAddress] = None""",
+        ),
+    ],
+)
+def test_parse_object(source_obj: dict[str, Any], generated_classes: str) -> None:
+    """Test parsing JSON schema objects."""
+    parser = JsonSchemaParser(
+        data_model_field_type=DataModelFieldBase,
+        source="",
+    )
+    parser.parse_object("Person", JsonSchemaObject.parse_obj(source_obj), [])
+    assert dump_templates(list(parser.results)) == generated_classes
+
+
+@pytest.mark.parametrize(
+    ("source_obj", "generated_classes"),
+    [
+        (
+            {
+                "$id": "https://example.com/person.schema.json",
+                "$schema": "http://json-schema.org/draft-07/schema#",
+                "title": "AnyJson",
+                "description": "This field accepts any object",
+                "discriminator": "type",
+            },
+            """class AnyObject(BaseModel):
+    __root__: Any = Field(..., description='This field accepts any object', discriminator='type', title='AnyJson')""",
+        )
+    ],
+)
+def test_parse_any_root_object(source_obj: dict[str, Any], generated_classes: str) -> None:
+    """Test parsing any root object."""
+    parser = JsonSchemaParser("")
+    parser.parse_root_type("AnyObject", JsonSchemaObject.parse_obj(source_obj), [])
+    assert dump_templates(list(parser.results)) == generated_classes
+
+
+@pytest.mark.parametrize(
+    ("source_obj", "generated_classes"),
+    [
+        (
+            yaml.safe_load((DATA_PATH / "oneof.json").read_text()),
+            (DATA_PATH / "oneof.json.snapshot").read_text(),
+        )
+    ],
+)
+def test_parse_one_of_object(source_obj: dict[str, Any], generated_classes: str) -> None:
+    """Test parsing oneOf schema objects."""
+    parser = JsonSchemaParser("")
+    parser.parse_raw_obj("onOfObject", source_obj, [])
+    assert dump_templates(list(parser.results)) == generated_classes
+
+
+@pytest.mark.parametrize(
+    ("source_obj", "generated_classes"),
+    [
+        (
+            {
+                "$id": "https://example.com/person.schema.json",
+                "$schema": "http://json-schema.org/draft-07/schema#",
+                "title": "defaults",
+                "type": "object",
+                "properties": {
+                    "string": {
+                        "type": "string",
+                        "default": "default string",
+                    },
+                    "string_on_field": {
+                        "type": "string",
+                        "default": "default string",
+                        "description": "description",
+                    },
+                    "number": {"type": "number", "default": 123},
+                    "number_on_field": {
+                        "type": "number",
+                        "default": 123,
+                        "description": "description",
+                    },
+                    "number_array": {"type": "array", "default": [1, 2, 3]},
+                    "string_array": {"type": "array", "default": ["a", "b", "c"]},
+                    "object": {"type": "object", "default": {"key": "value"}},
+                },
+            },
+            """class Defaults(BaseModel):
+    string: Optional[str] = 'default string'
+    string_on_field: Optional[str] = Field('default string', description='description')
+    number: Optional[float] = 123
+    number_on_field: Optional[float] = Field(123, description='description')
+    number_array: Optional[List[Any]] = [1, 2, 3]
+    string_array: Optional[List[Any]] = ['a', 'b', 'c']
+    object: Optional[Dict[str, Any]] = {'key': 'value'}""",
+        )
+    ],
+)
+def test_parse_default(source_obj: dict[str, Any], generated_classes: str) -> None:
+    """Test parsing default values in schemas."""
+    parser = JsonSchemaParser("")
+    parser.parse_raw_obj("Defaults", source_obj, [])
+    assert dump_templates(list(parser.results)) == generated_classes
+
+
+def test_parse_array_schema() -> None:
+    """Test parsing array schemas."""
+    parser = JsonSchemaParser("")
+    parser.parse_raw_obj("schema", {"type": "object", "properties": {"name": True}}, [])
+    assert (
+        dump_templates(list(parser.results))
+        == """class Schema(BaseModel):
+    name: Optional[Any] = None"""
+    )
+
+
+def test_parse_nested_array(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test parsing nested array schemas."""
+    monkeypatch.chdir(tmp_path)
+    parser = JsonSchemaParser(
+        DATA_PATH / "nested_array.json",
+        data_model_field_type=DataModelFieldBase,
+    )
+    parser.parse()
+    assert_output(dump_templates(list(parser.results)), DATA_PATH / "nested_array.json.snapshot")
+
+
+@pytest.mark.parametrize(
+    ("schema_type", "schema_format", "result_type", "from_", "import_", "use_pendulum"),
+    [
+        ("integer", "int32", "int", None, None, False),
+        ("integer", "int64", "int", None, None, False),
+        ("integer", "date-time", "datetime", "datetime", "datetime", False),
+        ("integer", "date-time", "DateTime", "pendulum", "DateTime", True),
+        ("integer", "unix-time", "int", None, None, False),
+        ("number", "float", "float", None, None, False),
+        ("number", "double", "float", None, None, False),
+        ("number", "time", "time", "datetime", "time", False),
+        ("number", "time", "Time", "pendulum", "Time", True),
+        ("number", "date-time", "datetime", "datetime", "datetime", False),
+        ("number", "date-time", "DateTime", "pendulum", "DateTime", True),
+        ("string", None, "str", None, None, False),
+        ("string", "byte", "str", None, None, False),
+        ("string", "binary", "bytes", None, None, False),
+        ("boolean", None, "bool", None, None, False),
+        ("string", "date", "date", "datetime", "date", False),
+        ("string", "date", "Date", "pendulum", "Date", True),
+        ("string", "date-time", "datetime", "datetime", "datetime", False),
+        ("string", "date-time", "DateTime", "pendulum", "DateTime", True),
+        ("string", "duration", "timedelta", "datetime", "timedelta", False),
+        ("string", "duration", "Duration", "pendulum", "Duration", True),
+        ("string", "path", "Path", "pathlib", "Path", False),
+        ("string", "password", "SecretStr", "pydantic", "SecretStr", False),
+        ("string", "email", "EmailStr", "pydantic", "EmailStr", False),
+        ("string", "uri", "AnyUrl", "pydantic", "AnyUrl", False),
+        ("string", "uri-reference", "str", None, None, False),
+        ("string", "uuid", "UUID", "uuid", "UUID", False),
+        ("string", "uuid1", "UUID1", "pydantic", "UUID1", False),
+        ("string", "uuid2", "UUID2", "pydantic", "UUID2", False),
+        ("string", "uuid3", "UUID3", "pydantic", "UUID3", False),
+        ("string", "uuid4", "UUID4", "pydantic", "UUID4", False),
+        ("string", "uuid5", "UUID5", "pydantic", "UUID5", False),
+        ("string", "ipv4", "IPv4Address", "ipaddress", "IPv4Address", False),
+        ("string", "ipv6", "IPv6Address", "ipaddress", "IPv6Address", False),
+        ("string", "unknown-type", "str", None, None, False),
+    ],
+)
+def test_get_data_type(
+    schema_type: str,
+    schema_format: str,
+    result_type: str,
+    from_: str | None,
+    import_: str | None,
+    use_pendulum: bool,
+) -> None:
+    """Test data type resolution from schema type and format."""
+    if from_ and import_:
+        import_: Import | None = Import(from_=from_, import_=import_)
+    else:
+        import_ = None
+
+    parser = JsonSchemaParser("", use_pendulum=use_pendulum)
+    assert (
+        parser.get_data_type(JsonSchemaObject(type=schema_type, format=schema_format)).dict()
+        == DataType(type=result_type, import_=import_).dict()
+    )
+
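+# A single row of the table above, written out directly (illustrative; it
+# mirrors the parametrized case ("string", "uuid") rather than adding new
+# behaviour):
+#
+#     parser = JsonSchemaParser("", use_pendulum=False)
+#     dt = parser.get_data_type(JsonSchemaObject(type="string", format="uuid"))
+#     assert dt.dict() == DataType(type="UUID", import_=Import(from_="uuid", import_="UUID")).dict()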
+
+@pytest.mark.parametrize(
+    ("schema_types", "result_types"),
+    [
+        (["integer", "number"], ["int", "float"]),
+        (["integer", "null"], ["int"]),
+    ],
+)
+def test_get_data_type_array(schema_types: list[str], result_types: list[str]) -> None:
+    """Test data type resolution for array of types."""
+    parser = JsonSchemaParser("")
+    assert parser.get_data_type(JsonSchemaObject(type=schema_types)) == parser.data_type(
+        data_types=[
+            parser.data_type(
+                type=r,
+            )
+            for r in result_types
+        ],
+        is_optional="null" in schema_types,
+    )
+
+
+def test_additional_imports() -> None:
+    """Test that additional imports are inside imports container."""
+    new_parser = JsonSchemaParser(source="", additional_imports=["collections.deque"])
+    assert len(new_parser.imports) == 1
+    assert new_parser.imports["collections"] == {"deque"}
+
+
+def test_no_additional_imports() -> None:
+    """Test that not additional imports are not affecting imports container."""
+    new_parser = JsonSchemaParser(
+        source="",
+    )
+    assert len(new_parser.imports) == 0
+
+
+@pytest.mark.parametrize(
+    ("source_obj", "generated_classes"),
+    [
+        (
+            {
+                "$id": "https://example.com/person.schema.json",
+                "$schema": "http://json-schema.org/draft-07/schema#",
+                "title": "Person",
+                "type": "object",
+                "properties": {
+                    "firstName": {
+                        "type": "string",
+                        "description": "The person's first name.",
+                        "alt_type": "integer",
+                    },
+                    "lastName": {
+                        "type": "string",
+                        "description": "The person's last name.",
+                        "alt_type": "integer",
+                    },
+                    "age": {
+                        "description": "Age in years which must be equal to or greater than zero.",
+                        "type": "integer",
+                        "minimum": 0,
+                        "alt_type": "number",
+                    },
+                    "real_age": {
+                        "description": "Age in years which must be equal to or greater than zero.",
+                        "type": "integer",
+                        "minimum": 0,
+                    },
+                },
+            },
+            """class Person(BaseModel):
+    firstName: Optional[int] = None
+    lastName: Optional[int] = None
+    age: Optional[confloat(ge=0.0)] = None
+    real_age: Optional[conint(ge=0)] = None""",
+        ),
+    ],
+)
+@pytest.mark.skipif(pydantic.VERSION < "2.0.0", reason="Require Pydantic version 2.0.0 or later ")
+def test_json_schema_parser_extension(source_obj: dict[str, Any], generated_classes: str) -> None:
+    """Test JSON schema parser extension with alt_type support."""
+
+    class AltJsonSchemaObject(JsonSchemaObject):
+        properties: Optional[dict[str, Union[AltJsonSchemaObject, bool]]] = None  # noqa: UP007, UP045
+        alt_type: Optional[str] = None  # noqa: UP045
+
+        def model_post_init(self, context: Any) -> None:  # noqa: ARG002
+            if self.alt_type:
+                self.type = self.alt_type
+
+    class AltJsonSchemaParser(JsonSchemaParser):
+        SCHEMA_OBJECT_TYPE = AltJsonSchemaObject
+
+    parser = AltJsonSchemaParser(
+        data_model_field_type=DataModelFieldBase,
+        source="",
+    )
+    parser.parse_object("Person", AltJsonSchemaObject.parse_obj(source_obj), [])
+    assert dump_templates(list(parser.results)) == generated_classes
+
+
+def test_create_data_model_with_frozen_dataclasses() -> None:
+    """Test _create_data_model when frozen_dataclasses attribute exists."""
+    parser = JsonSchemaParser(
+        "",
+        data_model_type=DataClass,
+        data_model_root_type=DataClass,
+    )
+    parser.frozen_dataclasses = True
+
+    field = DataModelFieldBase(name="test_field", data_type=DataType(type="str"), required=True)
+
+    result = parser._create_data_model(
+        reference=Reference(name="TestModel", path="test_model"),
+        fields=[field],
+    )
+
+    assert isinstance(result, DataClass)
+    assert result.name == "TestModel"
+
+
+def test_create_data_model_with_keyword_only() -> None:
+    """Test _create_data_model when keyword_only attribute exists."""
+    parser = JsonSchemaParser(
+        "",
+        data_model_type=DataClass,
+        data_model_root_type=DataClass,
+    )
+    parser.keyword_only = True
+
+    field = DataModelFieldBase(name="test_field", data_type=DataType(type="str"), required=True)
+
+    result = parser._create_data_model(
+        reference=Reference(name="TestModel", path="test_model"),
+        fields=[field],
+    )
+
+    assert isinstance(result, DataClass)
+    assert result.name == "TestModel"
+
+
+def test_create_data_model_with_both_frozen_and_keyword_only() -> None:
+    """Test _create_data_model when both frozen_dataclasses and keyword_only exist."""
+    parser = JsonSchemaParser(
+        "",
+        data_model_type=DataClass,
+        data_model_root_type=DataClass,
+    )
+    parser.frozen_dataclasses = True
+    parser.keyword_only = True
+
+    field = DataModelFieldBase(name="test_field", data_type=DataType(type="str"), required=True)
+
+    result = parser._create_data_model(
+        reference=Reference(name="TestModel", path="test_model"),
+        fields=[field],
+    )
+
+    assert isinstance(result, DataClass)
+    assert result.name == "TestModel"
+
+
+def test_create_data_model_with_existing_dataclass_arguments() -> None:
+    """Test _create_data_model when existing dataclass_arguments are provided in kwargs."""
+    parser = JsonSchemaParser(
+        "",
+        data_model_type=DataClass,
+        data_model_root_type=DataClass,
+    )
+    parser.frozen_dataclasses = True
+    parser.keyword_only = True
+
+    field = DataModelFieldBase(name="test_field", data_type=DataType(type="str"), required=True)
+
+    result = parser._create_data_model(
+        reference=Reference(name="TestModel", path="test_model"),
+        fields=[field],
+        dataclass_arguments={"slots": True, "order": True},
+    )
+
+    assert isinstance(result, DataClass)
+    assert result.name == "TestModel"
+
+
+def test_create_data_model_without_existing_dataclass_arguments() -> None:
+    """Test _create_data_model when no existing dataclass_arguments (else branch)."""
+    parser = JsonSchemaParser(
+        "",
+        data_model_type=DataClass,
+        data_model_root_type=DataClass,
+    )
+    parser.frozen_dataclasses = False
+    parser.keyword_only = False
+
+    field = DataModelFieldBase(name="test_field", data_type=DataType(type="str"), required=True)
+
+    result = parser._create_data_model(
+        reference=Reference(name="TestModel", path="test_model"),
+        fields=[field],
+    )
+
+    assert isinstance(result, DataClass)
+    assert result.name == "TestModel"
+
+
+def test_create_data_model_frozen_and_keyword_only_cleanup() -> None:
+    """Test that frozen and keyword_only are popped from kwargs when existing args present."""
+    parser = JsonSchemaParser(
+        "",
+        data_model_type=DataClass,
+        data_model_root_type=DataClass,
+    )
+    parser.frozen_dataclasses = True
+    parser.keyword_only = True
+
+    field = DataModelFieldBase(name="test_field", data_type=DataType(type="str"), required=True)
+
+    result = parser._create_data_model(
+        reference=Reference(name="TestModel", path="test_model"),
+        fields=[field],
+        dataclass_arguments={"slots": True},
+        frozen=False,
+        keyword_only=False,
+    )
+
+    assert isinstance(result, DataClass)
+    assert result.name == "TestModel"
+
+
+def test_create_data_model_with_complex_existing_arguments() -> None:
+    """Test _create_data_model with complex existing dataclass_arguments that get merged."""
+    parser = JsonSchemaParser(
+        "",
+        data_model_type=DataClass,
+        data_model_root_type=DataClass,
+    )
+    parser.frozen_dataclasses = True
+    parser.keyword_only = True
+
+    field = DataModelFieldBase(name="test_field", data_type=DataType(type="str"), required=True)
+
+    result = parser._create_data_model(
+        reference=Reference(name="TestModel", path="test_model"),
+        fields=[field],
+        dataclass_arguments={
+            "slots": True,
+            "order": True,
+            "unsafe_hash": False,
+            "match_args": True,
+        },
+    )
+
+    assert isinstance(result, DataClass)
+    assert result.name == "TestModel"
+
+
+def test_create_data_model_none_dataclass_arguments() -> None:
+    """Test _create_data_model when dataclass_arguments is explicitly None."""
+    parser = JsonSchemaParser(
+        "",
+        data_model_type=DataClass,
+        data_model_root_type=DataClass,
+    )
+    parser.frozen_dataclasses = True
+    parser.keyword_only = True
+
+    field = DataModelFieldBase(name="test_field", data_type=DataType(type="str"), required=True)
+
+    result = parser._create_data_model(
+        reference=Reference(name="TestModel", path="test_model"),
+        fields=[field],
+        dataclass_arguments=None,
+    )
+
+    assert isinstance(result, DataClass)
+    assert result.name == "TestModel"
+
+
+def test_create_data_model_non_dataclass_with_dataclass_arguments() -> None:
+    """Test _create_data_model removes dataclass_arguments for non-DataClass models."""
+    parser = JsonSchemaParser(
+        "",
+        data_model_type=BaseModel,
+        data_model_root_type=BaseModel,
+    )
+
+    field = DataModelFieldBase(name="test_field", data_type=DataType(type="str"), required=True)
+
+    # Pass dataclass_arguments even though model is not DataClass - should be removed
+    result = parser._create_data_model(
+        reference=Reference(name="TestModel", path="test_model"),
+        fields=[field],
+        dataclass_arguments={"frozen": True},
+    )
+
+    assert isinstance(result, BaseModel)
+    assert result.name == "TestModel"
+
+
+def test_parse_type_mappings_invalid_format() -> None:
+    """Test _parse_type_mappings raises ValueError for invalid format."""
+    with pytest.raises(ValueError, match="Invalid type mapping format"):
+        Parser._parse_type_mappings(["invalid_without_equals"])
+
+
+def test_parse_type_mappings_valid_formats() -> None:
+    """Test _parse_type_mappings with valid formats."""
+    result = Parser._parse_type_mappings(["binary=string", "string+date=string"])
+    assert result == {
+        ("string", "binary"): "string",
+        ("string", "date"): "string",
+    }
+
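+# The grammar exercised above: "<format>=<target>" appears to default the type
+# to "string", while "<type>+<format>=<target>" names it explicitly; either
+# form yields a (type, format) -> target entry.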
+
+def test_get_type_with_mappings_to_format() -> None:
+    """Test _get_type_with_mappings mapping to a format within type_formats."""
+    parser = JsonSchemaParser(
+        source="",
+        type_mappings=["binary=byte"],
+    )
+    result = parser._get_type_with_mappings("string", "binary")
+    assert result == Types.byte
+
+
+def test_get_type_with_mappings_to_type_default() -> None:
+    """Test _get_type_with_mappings mapping to a top-level type's default."""
+    parser = JsonSchemaParser(
+        source="",
+        type_mappings=["binary=boolean"],
+    )
+    result = parser._get_type_with_mappings("string", "binary")
+    assert result == Types.boolean
+
+
+def test_get_type_with_mappings_unknown_target_fallback() -> None:
+    """Test _get_type_with_mappings falls back to _get_type for unknown target."""
+    parser = JsonSchemaParser(
+        source="",
+        type_mappings=["binary=unknown_format"],
+    )
+    result = parser._get_type_with_mappings("string", "binary")
+    assert result == Types.binary
+
+
+@pytest.mark.parametrize(
+    ("frozen_dataclasses", "keyword_only", "parser_dataclass_args", "kwargs_dataclass_args", "expected"),
+    [
+        (False, False, None, None, {}),
+        (True, False, None, None, {"frozen": True}),
+        (False, True, None, None, {"kw_only": True}),
+        (True, True, None, None, {"frozen": True, "kw_only": True}),
+        (False, False, {"slots": True}, None, {"slots": True}),
+        (True, True, {"slots": True}, None, {"slots": True}),
+        (True, True, {"slots": True}, {"order": True}, {"order": True}),
+    ],
+)
+def test_create_data_model_dataclass_arguments(
+    frozen_dataclasses: bool,
+    keyword_only: bool,
+    parser_dataclass_args: dict | None,
+    kwargs_dataclass_args: dict | None,
+    expected: dict,
+) -> None:
+    """Test _create_data_model handles dataclass_arguments correctly."""
+    parser = JsonSchemaParser(
+        source="",
+        data_model_type=DataClass,
+        frozen_dataclasses=frozen_dataclasses,
+        keyword_only=keyword_only,
+    )
+    parser.dataclass_arguments = parser_dataclass_args
+
+    reference = Reference(path="test", original_name="Test", name="Test")
+    kwargs: dict[str, Any] = {"reference": reference, "fields": []}
+    if kwargs_dataclass_args is not None:
+        kwargs["dataclass_arguments"] = kwargs_dataclass_args
+    result = parser._create_data_model(**kwargs)
+    assert isinstance(result, DataClass)
+    assert result.dataclass_arguments == expected
+
+
+def test_get_ref_body_from_url_file_unc_path(mocker: MockerFixture) -> None:
+    """Test _get_ref_body_from_url handles UNC file:// URLs correctly."""
+    parser = JsonSchemaParser("")
+    mock_load = mocker.patch(
+        "datamodel_code_generator.parser.jsonschema.load_yaml_dict_from_path",
+        return_value={"type": "object"},
+    )
+
+    result = parser._get_ref_body_from_url("file://server/share/schemas/pet.json")
+
+    assert result == {"type": "object"}
+    mock_load.assert_called_once()
+    called_path = mock_load.call_args[0][0]
+    # On Windows, UNC paths have \\server\share\ as a single "drive" part
+    # On POSIX, they're separate: /, server, share, schemas, pet.json
+    path_str = str(called_path)
+    assert "server" in path_str
+    assert "share" in path_str
+    assert called_path.parts[-2:] == ("schemas", "pet.json")
+
+
+def test_get_ref_body_from_url_file_local_path(mocker: MockerFixture) -> None:
+    """Test _get_ref_body_from_url handles local file:// URLs (no netloc)."""
+    parser = JsonSchemaParser("")
+    mock_load = mocker.patch(
+        "datamodel_code_generator.parser.jsonschema.load_yaml_dict_from_path",
+        return_value={"type": "string"},
+    )
+
+    result = parser._get_ref_body_from_url("file:///home/user/schemas/pet.json")
+
+    assert result == {"type": "string"}
+    mock_load.assert_called_once()
+    called_path = mock_load.call_args[0][0]
+    assert called_path.parts[-4:] == ("home", "user", "schemas", "pet.json")
+
+
+def test_merge_ref_with_schema_no_ref() -> None:
+    """Test _merge_ref_with_schema returns object unchanged when no $ref is present."""
+    parser = JsonSchemaParser("")
+    obj = JsonSchemaObject.parse_obj({"type": "string", "minLength": 5})
+    result = parser._merge_ref_with_schema(obj)
+    assert result is obj
+
+
+def test_has_ref_with_schema_keywords_extras_with_schema_affecting_keys() -> None:
+    """Test has_ref_with_schema_keywords when extras contains schema-affecting keys."""
+    # const is stored in extras and is schema-affecting
+    obj = JsonSchemaObject.parse_obj({
+        "$ref": "#/$defs/Base",
+        "const": "active",
+    })
+    # Verify extras contains schema-affecting key
+    assert obj.extras
+    assert "const" in obj.extras
+    assert obj.has_ref_with_schema_keywords is True
+
+
+def test_has_ref_with_schema_keywords_extras_with_metadata_only_keys() -> None:
+    """Test has_ref_with_schema_keywords when extras contains only metadata keys."""
+    # $comment is metadata-only, should not trigger merge
+    obj = JsonSchemaObject.parse_obj({
+        "$ref": "#/$defs/Base",
+        "$comment": "this is a comment",
+    })
+    # Verify extras contains only metadata key
+    assert obj.extras
+    assert "$comment" in obj.extras
+    assert obj.has_ref_with_schema_keywords is False
+
+
+def test_has_ref_with_schema_keywords_no_extras() -> None:
+    """Test has_ref_with_schema_keywords when extras is empty."""
+    # Only $ref and a schema-affecting field, no extras
+    obj = JsonSchemaObject.parse_obj({
+        "$ref": "#/$defs/Base",
+        "minLength": 10,
+    })
+    # Verify extras is empty but minLength triggers merge
+    assert not obj.extras
+    assert obj.has_ref_with_schema_keywords is True
+
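+# Background for the three tests above: since JSON Schema draft 2019-09, "$ref"
+# may carry sibling keywords. Schema-affecting siblings such as "const" or
+# "minLength" force a merge with the referenced schema, while purely
+# annotational ones such as "$comment" leave the bare reference intact.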
+
+def test_parse_combined_schema_anyof_with_ref_and_schema_keywords() -> None:
+    """Test parse_combined_schema merges $ref with schema-affecting keywords in anyOf."""
+    parser = JsonSchemaParser("")
+    schema = {
+        "$schema": "http://json-schema.org/draft-07/schema#",
+        "type": "object",
+        "properties": {
+            "value": {
+                "anyOf": [
+                    {
+                        "$ref": "#/$defs/BaseString",
+                        "minLength": 10,
+                    },
+                    {
+                        "type": "integer",
+                    },
+                ]
+            }
+        },
+        "$defs": {
+            "BaseString": {
+                "type": "string",
+                "maxLength": 100,
+            }
+        },
+    }
+    parser.parse_raw_obj("Model", schema, [])
+    results = list(parser.results)
+    assert len(results) >= 1
+
+
+def test_parse_enum_empty_enum_not_nullable() -> None:
+    """Test parse_enum returns null type when enum_fields is empty and not nullable."""
+    parser = JsonSchemaParser("")
+    obj = JsonSchemaObject.parse_obj({"type": "integer", "enum": []})
+    result = parser.parse_enum("EmptyEnum", obj, ["EmptyEnum"])
+    assert result.type == "None"
+
+
+@pytest.mark.parametrize(
+    ("schema", "expected"),
+    [
+        ({"type": "array", "items": {"type": "string"}}, False),
+        ({"allOf": [{"type": "string"}]}, False),
+        ({"oneOf": [{"type": "string"}]}, False),
+        ({"anyOf": [{"type": "string"}]}, False),
+        ({"properties": {"name": {"type": "string"}}}, False),
+        ({"patternProperties": {".*": {"type": "string"}}}, False),
+        ({"type": "object"}, False),
+        ({"enum": ["a", "b"]}, False),
+        ({"type": "string"}, True),
+        ({"type": "string", "minLength": 1}, True),
+    ],
+)
+def test_is_root_model_schema(schema: dict[str, Any], expected: bool) -> None:
+    """Test _is_root_model_schema returns correct value for various schema types."""
+    parser = JsonSchemaParser("")
+    obj = JsonSchemaObject.parse_obj(schema)
+    assert parser._is_root_model_schema(obj) is expected
+
+
+def test_merge_primitive_schemas_for_allof_single_item() -> None:
+    """Test _merge_primitive_schemas_for_allof returns unchanged item when single."""
+    parser = JsonSchemaParser("")
+    item = JsonSchemaObject.parse_obj({"type": "string", "minLength": 1})
+    result = parser._merge_primitive_schemas_for_allof([item])
+    assert result == item
+
+
+def test_merge_primitive_schemas_for_allof_nomerge_mode() -> None:
+    """Test _merge_primitive_schemas_for_allof overwrites constraints in NoMerge mode."""
+    parser = JsonSchemaParser("")
+    parser.allof_merge_mode = AllOfMergeMode.NoMerge
+    items = [
+        JsonSchemaObject.parse_obj({"type": "string", "pattern": "^a.*"}),
+        JsonSchemaObject.parse_obj({"minLength": 5}),
+    ]
+    result = parser._merge_primitive_schemas_for_allof(items)
+    assert result.pattern == "^a.*"
+    assert result.minLength == 5
+
+
+def test_merge_primitive_schemas_for_allof_nomerge_mode_with_format() -> None:
+    """Test _merge_primitive_schemas_for_allof handles format in NoMerge mode."""
+    parser = JsonSchemaParser("")
+    parser.allof_merge_mode = AllOfMergeMode.NoMerge
+    items = [
+        JsonSchemaObject.parse_obj({"type": "string"}),
+        JsonSchemaObject.parse_obj({"format": "email"}),
+    ]
+    result = parser._merge_primitive_schemas_for_allof(items)
+    assert result.format == "email"
+
+
+def test_merge_primitive_schemas_for_allof_constraints_mode_with_format() -> None:
+    """Test _merge_primitive_schemas_for_allof handles format in Constraints mode."""
+    parser = JsonSchemaParser("")
+    parser.allof_merge_mode = AllOfMergeMode.Constraints
+    items = [
+        JsonSchemaObject.parse_obj({"type": "string", "pattern": "^a.*"}),
+        JsonSchemaObject.parse_obj({"format": "email"}),
+    ]
+    result = parser._merge_primitive_schemas_for_allof(items)
+    assert result.format == "email"
+
+
+def test_handle_allof_root_model_special_path_marker() -> None:
+    """Test _handle_allof_root_model_with_constraints returns None for special path."""
+    parser = JsonSchemaParser("")
+    obj = JsonSchemaObject.parse_obj({
+        "allOf": [
+            {"$ref": "#/definitions/Base"},
+            {"minLength": 1},
+        ]
+    })
+    path = [f"test{SPECIAL_PATH_MARKER}inline"]
+    result = parser._handle_allof_root_model_with_constraints("Test", obj, path)
+    assert result is None
+
+
+def test_handle_allof_root_model_multiple_refs() -> None:
+    """Test _handle_allof_root_model_with_constraints returns None for multiple refs."""
+    parser = JsonSchemaParser("")
+    obj = JsonSchemaObject.parse_obj({
+        "allOf": [
+            {"$ref": "#/definitions/Base1"},
+            {"$ref": "#/definitions/Base2"},
+        ]
+    })
+    result = parser._handle_allof_root_model_with_constraints("Test", obj, ["test"])
+    assert result is None
+
+
+def test_handle_allof_root_model_no_refs() -> None:
+    """Test _handle_allof_root_model_with_constraints returns None when no refs."""
+    parser = JsonSchemaParser("")
+    obj = JsonSchemaObject.parse_obj({
+        "allOf": [
+            {"type": "string"},
+            {"minLength": 1},
+        ]
+    })
+    result = parser._handle_allof_root_model_with_constraints("Test", obj, ["test"])
+    assert result is None
+
+
+def test_handle_allof_root_model_no_constraint_items() -> None:
+    """Test _handle_allof_root_model_with_constraints returns None when no constraints."""
+    parser = JsonSchemaParser("")
+    parser._load_ref_schema_object = lambda _ref: JsonSchemaObject.parse_obj({"type": "string"})
+    obj = JsonSchemaObject.parse_obj({
+        "allOf": [
+            {"$ref": "#/definitions/Base"},
+        ]
+    })
+    result = parser._handle_allof_root_model_with_constraints("Test", obj, ["test"])
+    assert result is None
+
+
+def test_handle_allof_root_model_constraint_with_properties() -> None:
+    """Test _handle_allof_root_model_with_constraints returns None when constraint has properties."""
+    parser = JsonSchemaParser("")
+    parser._load_ref_schema_object = lambda _ref: JsonSchemaObject.parse_obj({"type": "string"})
+    obj = JsonSchemaObject.parse_obj({
+        "allOf": [
+            {"$ref": "#/definitions/Base"},
+            {"properties": {"name": {"type": "string"}}},
+        ]
+    })
+    result = parser._handle_allof_root_model_with_constraints("Test", obj, ["test"])
+    assert result is None
+
+
+def test_handle_allof_root_model_constraint_with_items() -> None:
+    """Test _handle_allof_root_model_with_constraints returns None when constraint has items."""
+    parser = JsonSchemaParser("")
+    parser._load_ref_schema_object = lambda _ref: JsonSchemaObject.parse_obj({"type": "string"})
+    obj = JsonSchemaObject.parse_obj({
+        "allOf": [
+            {"$ref": "#/definitions/Base"},
+            {"items": {"type": "string"}},
+        ]
+    })
+    result = parser._handle_allof_root_model_with_constraints("Test", obj, ["test"])
+    assert result is None
+
+
+def test_handle_allof_root_model_incompatible_types() -> None:
+    """Test _handle_allof_root_model_with_constraints returns None for incompatible types."""
+    parser = JsonSchemaParser("")
+    parser._load_ref_schema_object = lambda _ref: JsonSchemaObject.parse_obj({"type": "string"})
+    obj = JsonSchemaObject.parse_obj({
+        "allOf": [
+            {"$ref": "#/definitions/Base"},
+            {"type": "boolean"},
+        ]
+    })
+    result = parser._handle_allof_root_model_with_constraints("Test", obj, ["test"])
+    assert result is None
+
+
+def test_handle_allof_root_model_ref_to_non_root() -> None:
+    """Test _handle_allof_root_model_with_constraints returns None when ref is not root model."""
+    parser = JsonSchemaParser("")
+    parser._load_ref_schema_object = lambda _ref: JsonSchemaObject.parse_obj({
+        "type": "object",
+        "properties": {"id": {"type": "integer"}},
+    })
+    obj = JsonSchemaObject.parse_obj({
+        "allOf": [
+            {"$ref": "#/definitions/Base"},
+            {"minLength": 1},
+        ]
+    })
+    result = parser._handle_allof_root_model_with_constraints("Test", obj, ["test"])
+    assert result is None
diff -pruN 0.26.4-3/tests/parser/test_openapi.py 0.45.0-1/tests/parser/test_openapi.py
--- 0.26.4-3/tests/parser/test_openapi.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/parser/test_openapi.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,960 @@
+"""Tests for OpenAPI/Swagger schema parser."""
+
+from __future__ import annotations
+
+import os
+import platform
+from pathlib import Path
+from typing import Any
+
+import black
+import pydantic
+import pytest
+from packaging import version
+
+from datamodel_code_generator import OpenAPIScope, PythonVersionMin
+from datamodel_code_generator.model import DataModelFieldBase
+from datamodel_code_generator.model.pydantic import DataModelField
+from datamodel_code_generator.parser.base import dump_templates
+from datamodel_code_generator.parser.jsonschema import JsonSchemaObject
+from datamodel_code_generator.parser.openapi import (
+    MediaObject,
+    OpenAPIParser,
+    ParameterObject,
+    RequestBodyObject,
+    ResponseObject,
+)
+from tests.conftest import assert_output, assert_parser_modules, assert_parser_results
+
+DATA_PATH: Path = Path(__file__).parents[1] / "data" / "openapi"
+
+EXPECTED_OPEN_API_PATH = Path(__file__).parents[1] / "data" / "expected" / "parser" / "openapi"
+
+
+def get_expected_file(
+    test_name: str,
+    with_import: bool,
+    format_: bool,
+    base_class: str | None = None,
+    prefix: str | None = None,
+) -> Path:
+    """Get expected output file path for test."""
+    params: list[str] = []
+    if with_import:
+        params.append("with_import")
+    if format_:
+        params.append("format")
+    if base_class:
+        params.append(base_class)
+    file_name = "_".join(params or "output")
+
+    return EXPECTED_OPEN_API_PATH / test_name / (prefix or "") / f"{file_name}.py"
+
+
+@pytest.mark.parametrize(
+    ("source_obj", "generated_classes"),
+    [
+        (
+            {"properties": {"name": {"type": "string"}}},
+            """class Pets(BaseModel):
+    name: Optional[str] = None""",
+        ),
+        (
+            {
+                "properties": {
+                    "kind": {
+                        "type": "object",
+                        "properties": {"name": {"type": "string"}},
+                    }
+                }
+            },
+            """class Kind(BaseModel):
+    name: Optional[str] = None
+
+
+class Pets(BaseModel):
+    kind: Optional[Kind] = None""",
+        ),
+        (
+            {
+                "properties": {
+                    "Kind": {
+                        "type": "object",
+                        "properties": {"name": {"type": "string"}},
+                    }
+                }
+            },
+            """class Kind(BaseModel):
+    name: Optional[str] = None
+
+
+class Pets(BaseModel):
+    Kind: Optional[Kind] = None""",
+        ),
+        (
+            {
+                "properties": {
+                    "pet_kind": {
+                        "type": "object",
+                        "properties": {"name": {"type": "string"}},
+                    }
+                }
+            },
+            """class PetKind(BaseModel):
+    name: Optional[str] = None
+
+
+class Pets(BaseModel):
+    pet_kind: Optional[PetKind] = None""",
+        ),
+        (
+            {
+                "properties": {
+                    "kind": {
+                        "type": "array",
+                        "items": [
+                            {
+                                "type": "object",
+                                "properties": {"name": {"type": "string"}},
+                            }
+                        ],
+                    }
+                }
+            },
+            """class KindItem(BaseModel):
+    name: Optional[str] = None
+
+
+class Pets(BaseModel):
+    kind: Optional[List[KindItem]] = None""",
+        ),
+        (
+            {"properties": {"kind": {"type": "array", "items": []}}},
+            """class Pets(BaseModel):
+    kind: Optional[List[Any]] = None""",
+        ),
+    ],
+)
+def test_parse_object(source_obj: dict[str, Any], generated_classes: str) -> None:
+    """Test parsing OpenAPI object schemas."""
+    parser = OpenAPIParser("")
+    parser.parse_object("Pets", JsonSchemaObject.parse_obj(source_obj), [])
+    assert dump_templates(list(parser.results)) == generated_classes
+
+
+@pytest.mark.parametrize(
+    ("source_obj", "generated_classes"),
+    [
+        (
+            {
+                "type": "array",
+                "items": {"type": "object", "properties": {"name": {"type": "string"}}},
+            },
+            """class Pet(BaseModel):
+    name: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]""",
+        ),
+        (
+            {
+                "type": "array",
+                "items": [{"type": "object", "properties": {"name": {"type": "string"}}}],
+            },
+            """class Pet(BaseModel):
+    name: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]""",
+        ),
+        (
+            {
+                "type": "array",
+                "items": {},
+            },
+            """class Pets(BaseModel):
+    __root__: List[Any]""",
+        ),
+    ],
+)
+def test_parse_array(source_obj: dict[str, Any], generated_classes: str) -> None:
+    """Test parsing OpenAPI array schemas."""
+    parser = OpenAPIParser("")
+    parser.parse_array("Pets", JsonSchemaObject.parse_obj(source_obj), [])
+    assert dump_templates(list(parser.results)) == generated_classes
+
+
+@pytest.mark.parametrize(
+    ("with_import", "format_", "base_class"),
+    [
+        (
+            True,
+            True,
+            None,
+        ),
+        (
+            False,
+            True,
+            None,
+        ),
+        (
+            True,
+            False,
+            None,
+        ),
+        (True, True, "custom_module.Base"),
+    ],
+)
+def test_openapi_parser_parse(with_import: bool, format_: bool, base_class: str | None) -> None:
+    """Test OpenAPI parser with various configurations."""
+    parser = OpenAPIParser(
+        data_model_field_type=DataModelFieldBase,
+        source=Path(DATA_PATH / "api.yaml"),
+        base_class=base_class,
+    )
+    expected_file = get_expected_file("openapi_parser_parse", with_import, format_, base_class)
+    assert_output(parser.parse(with_import=with_import, format_=format_, settings_path=DATA_PATH.parent), expected_file)
+
+
+@pytest.mark.parametrize(
+    ("source_obj", "generated_classes"),
+    [
+        (
+            {"type": "string", "nullable": True},
+            """class Name(BaseModel):
+    __root__: Optional[str] = None""",
+        ),
+        (
+            {"type": "string", "nullable": False},
+            """class Name(BaseModel):
+    __root__: str""",
+        ),
+    ],
+)
+def test_parse_root_type(source_obj: dict[str, Any], generated_classes: str) -> None:
+    """Test parsing OpenAPI root type schemas."""
+    parser = OpenAPIParser("")
+    parser.parse_root_type("Name", JsonSchemaObject.parse_obj(source_obj), [])
+    assert dump_templates(list(parser.results)) == generated_classes
+
+
+def test_openapi_parser_parse_duplicate_models(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test parsing OpenAPI with duplicate model names."""
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "duplicate_models.yaml"),
+    )
+    assert_output(parser.parse(), EXPECTED_OPEN_API_PATH / "openapi_parser_parse_duplicate_models" / "output.py")
+
+
+def test_openapi_parser_parse_duplicate_model_with_simplify(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test parsing OpenAPI with duplicate models and simplification."""
+    monkeypatch.chdir(tmp_path)
+    raw = Path(DATA_PATH / "duplicate_model_simplify.yaml")
+    parser = OpenAPIParser(raw)
+    assert_output(
+        parser.parse(), EXPECTED_OPEN_API_PATH / "openapi_parser_parse_duplicate_models_simplify" / "output.py"
+    )
+
+
+def test_openapi_parser_parse_resolved_models(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test parsing OpenAPI with resolved model references."""
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "resolved_models.yaml"),
+    )
+    assert_output(parser.parse(), EXPECTED_OPEN_API_PATH / "openapi_parser_parse_resolved_models" / "output.py")
+
+
+def test_openapi_parser_parse_lazy_resolved_models(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test parsing OpenAPI with lazy resolved model references."""
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "lazy_resolved_models.yaml"),
+    )
+    assert (
+        parser.parse()
+        == """from __future__ import annotations
+
+from typing import List, Optional
+
+from pydantic import BaseModel
+
+
+class Pet(BaseModel):
+    id: int
+    name: str
+    tag: Optional[str] = None
+
+
+class Pets(BaseModel):
+    __root__: List[Pet]
+
+
+class Error(BaseModel):
+    code: int
+    message: str
+
+
+class Event(BaseModel):
+    name: Optional[str] = None
+    event: Optional[Event] = None
+
+
+class Events(BaseModel):
+    __root__: List[Event]
+
+
+class Results(BaseModel):
+    envets: Optional[List[Events]] = None
+    event: Optional[List[Event]] = None
+"""
+    )
+
+
+def test_openapi_parser_parse_x_enum_varnames(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test parsing OpenAPI with x-enum-varnames extension."""
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "x_enum_varnames.yaml"),
+    )
+    assert (
+        parser.parse()
+        == """from __future__ import annotations
+
+from enum import Enum
+
+
+class String(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class UnknownTypeString(Enum):
+    dog = 'dog'
+    cat = 'cat'
+    snake = 'snake'
+
+
+class NamedString(Enum):
+    EQ = '='
+    NE = '!='
+    GT = '>'
+    LT = '<'
+    GE = '>='
+    LE = '<='
+
+
+class NamedNumber(Enum):
+    one = 1
+    two = 2
+    three = 3
+
+
+class Number(Enum):
+    number_1 = 1
+    number_2 = 2
+    number_3 = 3
+
+
+class UnknownTypeNumber(Enum):
+    int_1 = 1
+    int_2 = 2
+    int_3 = 3
+"""
+    )
+
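+# The "x-enum-varnames" vendor extension names enum members positionally; a
+# minimal fragment of the shape consumed above (illustrative only, written as
+# the equivalent dict):
+#
+#     {"NamedString": {"type": "string", "enum": ["=", "!="],
+#                      "x-enum-varnames": ["EQ", "NE"]}}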
+
+@pytest.mark.skipif(pydantic.VERSION < "1.9.0", reason="Require Pydantic version 1.9.0 or later ")
+def test_openapi_parser_parse_enum_models() -> None:
+    """Test parsing OpenAPI enum models."""
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "enum_models.yaml").read_text(encoding="utf-8"),
+        target_python_version=PythonVersionMin,
+    )
+    expected_dir = EXPECTED_OPEN_API_PATH / "openapi_parser_parse_enum_models"
+    assert_output(parser.parse(), expected_dir / "output.py")
+
+
+def test_openapi_parser_parse_anyof(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test parsing OpenAPI with anyOf schemas."""
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "anyof.yaml"),
+    )
+    assert_output(parser.parse(), EXPECTED_OPEN_API_PATH / "openapi_parser_parse_anyof" / "output.py")
+
+
+def test_openapi_parser_parse_anyof_required(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test parsing OpenAPI with anyOf and required fields."""
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "anyof_required.yaml"),
+    )
+    assert_output(parser.parse(), EXPECTED_OPEN_API_PATH / "openapi_parser_parse_anyof_required" / "output.py")
+
+
+def test_openapi_parser_parse_nested_anyof(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test parsing OpenAPI with nested anyOf schemas."""
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "nested_anyof.yaml").read_text(encoding="utf-8"),
+    )
+    assert_output(parser.parse(), EXPECTED_OPEN_API_PATH / "openapi_parser_parse_nested_anyof" / "output.py")
+
+
+def test_openapi_parser_parse_oneof(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test parsing OpenAPI with oneOf schemas."""
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "oneof.yaml"),
+    )
+    assert_output(parser.parse(), EXPECTED_OPEN_API_PATH / "openapi_parser_parse_oneof" / "output.py")
+
+
+def test_openapi_parser_parse_nested_oneof(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test parsing OpenAPI with nested oneOf schemas."""
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "nested_oneof.yaml").read_text(encoding="utf-8"),
+    )
+    assert_output(parser.parse(), EXPECTED_OPEN_API_PATH / "openapi_parser_parse_nested_oneof" / "output.py")
+
+
+def test_openapi_parser_parse_allof_ref(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test parsing OpenAPI with allOf references."""
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "allof_same_prefix_with_ref.yaml"),
+    )
+    assert_output(
+        parser.parse(), EXPECTED_OPEN_API_PATH / "openapi_parser_parse_allof_same_prefix_with_ref" / "output.py"
+    )
+
+
+def test_openapi_parser_parse_allof(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test parsing OpenAPI with allOf schemas."""
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "allof.yaml"),
+    )
+    assert_output(parser.parse(), EXPECTED_OPEN_API_PATH / "openapi_parser_parse_allof" / "output.py")
+
+
+def test_openapi_parser_parse_allof_required_fields(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test parsing OpenAPI with allOf and required fields."""
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "allof_required_fields.yaml"),
+    )
+    assert_output(parser.parse(), EXPECTED_OPEN_API_PATH / "openapi_parser_parse_allof_required_fields" / "output.py")
+
+
+def test_openapi_parser_parse_alias(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test parsing OpenAPI with field aliases."""
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "alias.yaml"),
+    )
+    delimiter = "\\" if platform.system() == "Windows" else "/"
+    results = {delimiter.join(p): r for p, r in parser.parse().items()}
+    assert_parser_results(results, EXPECTED_OPEN_API_PATH / "openapi_parser_parse_alias")
+
+
+def test_openapi_parser_parse_modular(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test parsing OpenAPI with modular structure."""
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(Path(DATA_PATH / "modular.yaml"), data_model_field_type=DataModelFieldBase)
+    modules = parser.parse()
+    assert_parser_modules(modules, EXPECTED_OPEN_API_PATH / "openapi_parser_parse_modular")
+
+
+@pytest.mark.parametrize(
+    ("with_import", "format_", "base_class"),
+    [
+        (
+            True,
+            True,
+            None,
+        ),
+        (
+            False,
+            True,
+            None,
+        ),
+        (
+            True,
+            False,
+            None,
+        ),
+        (
+            True,
+            True,
+            "custom_module.Base",
+        ),
+    ],
+)
+def test_openapi_parser_parse_additional_properties(with_import: bool, format_: bool, base_class: str | None) -> None:
+    """Test parsing OpenAPI with additional properties."""
+    parser = OpenAPIParser(
+        Path(DATA_PATH / "additional_properties.yaml").read_text(encoding="utf-8"),
+        base_class=base_class,
+        data_model_field_type=DataModelFieldBase,
+    )
+
+    assert_output(
+        parser.parse(with_import=with_import, format_=format_, settings_path=DATA_PATH.parent),
+        get_expected_file(
+            "openapi_parser_parse_additional_properties",
+            with_import,
+            format_,
+            base_class,
+        ),
+    )
+
+
+def test_openapi_parser_parse_array_enum(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test parsing OpenAPI with array enum types."""
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(source=Path(DATA_PATH / "array_enum.yaml"))
+    expected_file = get_expected_file("openapi_parser_parse_array_enum", True, True)
+    assert_output(parser.parse(), expected_file)
+
+
+def test_openapi_parser_parse_remote_ref(tmp_path: Path, monkeypatch: pytest.MonkeyPatch, mocker: Any) -> None:
+    """Test parsing OpenAPI with remote references."""
+    monkeypatch.chdir(tmp_path)
+
+    remote_schema = """
+schemas:
+  Problem:
+    properties:
+      detail:
+        description: A human readable explanation specific to this occurrence of the problem.
+        type: string
+      instance:
+        description: An absolute URI that identifies the specific occurrence of the problem.
+        format: uri
+        type: string
+      status:
+        description: The HTTP status code generated by the origin server for this occurrence of the problem.
+        exclusiveMaximum: true
+        format: int32
+        maximum: 600
+        minimum: 100
+        type: integer
+      title:
+        description: A short, summary of the problem type.
+        type: string
+      type:
+        default: about:blank
+        description: An absolute URI that identifies the problem type.
+        format: uri
+        type: string
+    type: object
+"""
+    mock_response = mocker.Mock()
+    mock_response.text = remote_schema
+    mocker.patch("httpx.get", return_value=mock_response)
+
+    parser = OpenAPIParser(
+        data_model_field_type=DataModelFieldBase,
+        source=(DATA_PATH / "refs.yaml").read_text(),
+        http_ignore_tls=bool(os.environ.get("HTTP_IGNORE_TLS")),
+    )
+    expected_file = get_expected_file("openapi_parser_parse_remote_ref", True, True)
+
+    assert_output(parser.parse(), expected_file)
+
+
+def test_openapi_parser_parse_required_null(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test parsing OpenAPI with required nullable fields."""
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(source=Path(DATA_PATH / "required_null.yaml"))
+    assert_output(parser.parse(), EXPECTED_OPEN_API_PATH / "openapi_parser_parse_required_null" / "output.py")
+
+
+def test_openapi_model_resolver(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test OpenAPI model resolver functionality."""
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(source=(DATA_PATH / "api.yaml"))
+    parser.parse()
+
+    references = {
+        k: v.dict(
+            exclude={"source", "module_name", "actual_module_name"},
+        )
+        for k, v in parser.model_resolver.references.items()
+    }
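+    # References whose path ends in the "#-datamodel-code-generator-#-object-#-special-#"
+    # marker are synthesized by the parser for inline object definitions (here the
+    # item models "User" and "Api" derived from the "Users"/"apis" array schemas).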
+    assert references == {
+        "api.yaml#/components/schemas/Error": {
+            "duplicate_name": None,
+            "loaded": True,
+            "name": "Error",
+            "original_name": "Error",
+            "path": "api.yaml#/components/schemas/Error",
+        },
+        "api.yaml#/components/schemas/Event": {
+            "duplicate_name": None,
+            "loaded": True,
+            "name": "Event",
+            "original_name": "Event",
+            "path": "api.yaml#/components/schemas/Event",
+        },
+        "api.yaml#/components/schemas/Id": {
+            "duplicate_name": None,
+            "loaded": True,
+            "name": "Id",
+            "original_name": "Id",
+            "path": "api.yaml#/components/schemas/Id",
+        },
+        "api.yaml#/components/schemas/Pet": {
+            "duplicate_name": None,
+            "loaded": True,
+            "name": "Pet",
+            "original_name": "Pet",
+            "path": "api.yaml#/components/schemas/Pet",
+        },
+        "api.yaml#/components/schemas/Pets": {
+            "duplicate_name": None,
+            "loaded": True,
+            "name": "Pets",
+            "original_name": "Pets",
+            "path": "api.yaml#/components/schemas/Pets",
+        },
+        "api.yaml#/components/schemas/Result": {
+            "duplicate_name": None,
+            "loaded": True,
+            "name": "Result",
+            "original_name": "Result",
+            "path": "api.yaml#/components/schemas/Result",
+        },
+        "api.yaml#/components/schemas/Rules": {
+            "duplicate_name": None,
+            "loaded": True,
+            "name": "Rules",
+            "original_name": "Rules",
+            "path": "api.yaml#/components/schemas/Rules",
+        },
+        "api.yaml#/components/schemas/Users": {
+            "duplicate_name": None,
+            "loaded": True,
+            "name": "Users",
+            "original_name": "Users",
+            "path": "api.yaml#/components/schemas/Users",
+        },
+        "api.yaml#/components/schemas/Users/Users/0#-datamodel-code-generator-#-object-#-special-#": {
+            "duplicate_name": None,
+            "loaded": True,
+            "name": "User",
+            "original_name": "Users",
+            "path": "api.yaml#/components/schemas/Users/Users/0#-datamodel-code-generator-#-object-#-special-#",
+        },
+        "api.yaml#/components/schemas/apis": {
+            "duplicate_name": None,
+            "loaded": True,
+            "name": "Apis",
+            "original_name": "apis",
+            "path": "api.yaml#/components/schemas/apis",
+        },
+        "api.yaml#/components/schemas/apis/apis/0#-datamodel-code-generator-#-object-#-special-#": {
+            "duplicate_name": None,
+            "loaded": True,
+            "name": "Api",
+            "original_name": "apis",
+            "path": "api.yaml#/components/schemas/apis/apis/0#-datamodel-code-generator-#-object-#-special-#",
+        },
+    }
+
+
+def test_openapi_parser_parse_any(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test parsing OpenAPI with any type schemas."""
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        data_model_field_type=DataModelFieldBase,
+        source=Path(DATA_PATH / "any.yaml"),
+    )
+    assert_output(parser.parse(), EXPECTED_OPEN_API_PATH / "openapi_parser_parse_any" / "output.py")
+
+
+def test_openapi_parser_responses_without_content(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test parsing OpenAPI responses without content."""
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        data_model_field_type=DataModelFieldBase,
+        source=Path(DATA_PATH / "body_and_parameters.yaml"),
+        openapi_scopes=[OpenAPIScope.Paths],
+        allow_responses_without_content=True,
+    )
+    assert_output(parser.parse(), EXPECTED_OPEN_API_PATH / "openapi_parser_responses_without_content" / "output.py")
+
+
+def test_openapi_parser_responses_with_tag(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test parsing OpenAPI responses with tags."""
+    monkeypatch.chdir(tmp_path)
+    parser = OpenAPIParser(
+        data_model_field_type=DataModelFieldBase,
+        source=Path(DATA_PATH / "body_and_parameters.yaml"),
+        openapi_scopes=[OpenAPIScope.Tags, OpenAPIScope.Schemas, OpenAPIScope.Paths],
+    )
+    assert_output(parser.parse(), EXPECTED_OPEN_API_PATH / "openapi_parser_responses_with_tag" / "output.py")
+
+
+@pytest.mark.skipif(
+    int(black.__version__.split(".")[0]) >= 24,
+    reason="Installed black doesn't support the old style",
+)
+def test_openapi_parser_with_query_parameters() -> None:
+    """Test parsing OpenAPI with query parameters."""
+    parser = OpenAPIParser(
+        data_model_field_type=DataModelFieldBase,
+        source=Path(DATA_PATH / "query_parameters.yaml"),
+        openapi_scopes=[
+            OpenAPIScope.Parameters,
+            OpenAPIScope.Schemas,
+            OpenAPIScope.Paths,
+        ],
+    )
+    assert_output(parser.parse(), EXPECTED_OPEN_API_PATH / "openapi_parser_with_query_parameters" / "output.py")
+
+
+@pytest.mark.skipif(
+    int(black.__version__.split(".")[0]) >= 24,
+    reason="Installed black doesn't support the old style",
+)
+def test_openapi_parser_with_include_path_parameters() -> None:
+    """Test parsing OpenAPI with included path parameters."""
+    parser = OpenAPIParser(
+        data_model_field_type=DataModelFieldBase,
+        source=Path(DATA_PATH / "query_parameters.yaml"),
+        openapi_scopes=[
+            OpenAPIScope.Parameters,
+            OpenAPIScope.Schemas,
+            OpenAPIScope.Paths,
+        ],
+        include_path_parameters=True,
+    )
+    assert_output(
+        parser.parse(), EXPECTED_OPEN_API_PATH / "openapi_parser_with_query_parameters" / "with_path_params.py"
+    )
+
+
+def test_parse_all_parameters_duplicate_names_exception() -> None:
+    """Test parsing parameters with duplicate names raises exception."""
+    parser = OpenAPIParser("", include_path_parameters=True)
+    parameters = [
+        ParameterObject.parse_obj({"name": "duplicate_param", "in": "path", "schema": {"type": "string"}}),
+        ParameterObject.parse_obj({"name": "duplicate_param", "in": "query", "schema": {"type": "integer"}}),
+    ]
+
+    with pytest.raises(Exception) as exc_info:  # noqa: PT011
+        parser.parse_all_parameters("TestModel", parameters, ["test", "path"])
+
+    assert "Parameter name 'duplicate_param' is used more than once." in str(exc_info.value)
+
+
+@pytest.mark.skipif(
+    version.parse(pydantic.VERSION) < version.parse("2.9.0"),
+    reason="Require Pydantic version 2.0.0 or later ",
+)
+def test_openapi_parser_array_called_fields_with_one_of_items() -> None:
+    """Test parsing OpenAPI array fields with oneOf items."""
+    parser = OpenAPIParser(
+        data_model_field_type=DataModelField,
+        source=Path(DATA_PATH / "array_called_fields_with_oneOf_items.yaml"),
+        openapi_scopes=[
+            OpenAPIScope.Parameters,
+            OpenAPIScope.Schemas,
+            OpenAPIScope.Paths,
+        ],
+        field_constraints=True,
+    )
+    assert_output(
+        parser.parse(),
+        EXPECTED_OPEN_API_PATH / "openapi_parser_parse_array_called_fields_with_oneOf_items" / "output.py",
+    )
+
+
+def test_additional_imports() -> None:
+    """Test that additional imports are inside imports container."""
+    new_parser = OpenAPIParser(source="", additional_imports=["collections.deque"])
+    assert len(new_parser.imports) == 1
+    assert new_parser.imports["collections"] == {"deque"}
+
+
+def test_no_additional_imports() -> None:
+    """Test that omitting additional imports leaves the imports container empty."""
+    new_parser = OpenAPIParser(source="")
+    assert len(new_parser.imports) == 0
+
+
+@pytest.mark.parametrize(
+    ("request_body_data", "expected_type_hints"),
+    [
+        pytest.param(
+            {"application/json": {"schema": {"type": "object", "properties": {"name": {"type": "string"}}}}},
+            {"application/json": "TestRequest"},
+            id="object_with_properties",
+        ),
+        pytest.param(
+            {
+                "application/json": {"schema": {"type": "object", "properties": {"name": {"type": "string"}}}},
+                "text/plain": {"schema": {"type": "string"}},
+            },
+            {"application/json": "TestRequest", "text/plain": "str"},
+            id="multiple_media_types",
+        ),
+        pytest.param(
+            {"application/json": {"schema": {"$ref": "#/components/schemas/RequestRef"}}},
+            {"application/json": "RequestRef"},
+            id="schema_reference",
+        ),
+        pytest.param(
+            {"application/json": {}},  # MediaObject with no schema
+            {},  # Should result in empty dict since no schema to process
+            id="missing_schema",
+        ),
+    ],
+)
+def test_parse_request_body_return(request_body_data: dict[str, Any], expected_type_hints: dict[str, str]) -> None:
+    """Test parsing request body returns correct type hints."""
+    parser = OpenAPIParser(
+        data_model_field_type=DataModelFieldBase,
+        source="",
+        use_standard_collections=True,
+    )
+    result = parser.parse_request_body(
+        "TestRequest",
+        RequestBodyObject(
+            content={
+                media_type: MediaObject.parse_obj(media_data) for media_type, media_data in request_body_data.items()
+            }
+        ),
+        ["test", "path"],
+    )
+
+    assert isinstance(result, dict)
+    assert len(result) == len(expected_type_hints)
+    for media_type, expected_hint in expected_type_hints.items():
+        assert media_type in result
+        assert result[media_type].type_hint == expected_hint
+
+
+@pytest.mark.parametrize(
+    ("parameters_data", "expected_type_hint"),
+    [
+        pytest.param([], None, id="no_parameters"),
+        pytest.param(
+            [{"name": "search", "in": "query", "required": False, "schema": {"type": "string"}}],
+            "TestParametersQuery",
+            id="with_query_parameters",
+        ),
+        pytest.param(
+            [{"name": "userId", "in": "path", "required": True, "schema": {"type": "string"}}],
+            None,
+            id="path_parameter_only",
+        ),
+    ],
+)
+def test_parse_all_parameters_return(parameters_data: list[dict[str, Any]], expected_type_hint: str | None) -> None:
+    """Test parsing parameters returns correct type hints."""
+    parser = OpenAPIParser(
+        data_model_field_type=DataModelFieldBase,
+        source="",
+        openapi_scopes=[OpenAPIScope.Parameters],
+    )
+    result = parser.parse_all_parameters(
+        "TestParametersQuery",
+        [ParameterObject.parse_obj(param_data) for param_data in parameters_data],
+        ["test", "path"],
+    )
+    if expected_type_hint is None:
+        assert result is None
+    else:
+        assert result is not None
+        assert result.type_hint == expected_type_hint
+
+
+@pytest.mark.parametrize(
+    ("responses_data", "expected_type_hints"),
+    [
+        pytest.param(
+            {
+                "200": {
+                    "description": "Success",
+                    "content": {"application/json": {"schema": {"type": "string"}}},
+                }
+            },
+            {"200": {"application/json": "str"}},
+            id="simple_response_with_schema",
+        ),
+        pytest.param(
+            {
+                "200": {
+                    "description": "Success",
+                    "content": {
+                        "application/json": {"schema": {"type": "object", "properties": {"name": {"type": "string"}}}},
+                        "text/plain": {"schema": {"type": "string"}},
+                    },
+                },
+                "400": {
+                    "description": "Bad Request",
+                    "content": {"text/plain": {"schema": {"type": "string"}}},
+                },
+            },
+            {"200": {"application/json": "TestResponse", "text/plain": "str"}, "400": {"text/plain": "str"}},
+            id="multiple_status_codes_and_content_types",
+        ),
+        pytest.param(
+            {
+                "200": {
+                    "description": "Success",
+                    "content": {"application/json": {}},  # Content but no schema
+                }
+            },
+            {},  # Should skip since no schema in content
+            id="response_with_no_schema",
+        ),
+    ],
+)
+def test_parse_responses_return(
+    responses_data: dict[str, dict[str, Any]],
+    expected_type_hints: dict[str, dict[str, str]],
+) -> None:
+    """Test parsing responses returns correct type hints."""
+    parser = OpenAPIParser(
+        data_model_field_type=DataModelFieldBase,
+        source="",
+    )
+
+    result = parser.parse_responses(
+        "TestResponse",
+        {status_code: ResponseObject.parse_obj(response_data) for status_code, response_data in responses_data.items()},
+        ["test", "path"],
+    )
+
+    assert isinstance(result, dict)
+    assert len(result) == len(expected_type_hints)
+    for status_code, expected_content_types in expected_type_hints.items():
+        assert status_code in result
+        assert len(result[status_code]) == len(expected_content_types)
+        for content_type, expected_type_hint in expected_content_types.items():
+            assert content_type in result[status_code]
+            assert result[status_code][content_type].type_hint == expected_type_hint
diff -pruN 0.26.4-3/tests/parser/test_scc.py 0.45.0-1/tests/parser/test_scc.py
--- 0.26.4-3/tests/parser/test_scc.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/parser/test_scc.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,388 @@
+"""Unit tests for SCC (Strongly Connected Components) detection module."""
+
+from __future__ import annotations
+
+from inline_snapshot import snapshot
+
+from datamodel_code_generator.parser._scc import (
+    find_circular_sccs,
+    strongly_connected_components,
+)
+
+
+def _to_sorted_result(sccs: list[set[tuple[str, ...]]]) -> list[list[tuple[str, ...]]]:
+    """Convert SCCs to sorted nested lists for deterministic snapshot comparison."""
+    return [sorted(scc) for scc in sccs]
+
+
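+# Nodes are tuples of module-path segments and edges point at the modules a
+# node imports, matching the parser's internal graph representation. A minimal
+# sketch (names are illustrative only):
+#
+#     graph = {("pkg", "a"): {("pkg", "b")}, ("pkg", "b"): {("pkg", "a")}}
+#     strongly_connected_components(graph)  # -> [{("pkg", "a"), ("pkg", "b")}]
+#     find_circular_sccs(graph)  # -> only the SCCs that actually form cycles
+
+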
+def test_scc_empty_graph() -> None:
+    """Empty graph."""
+    graph: dict[tuple[str, ...], set[tuple[str, ...]]] = {}
+    assert _to_sorted_result(strongly_connected_components(graph)) == snapshot([])
+
+
+def test_scc_single_node_without_edges() -> None:
+    """Graph: a (isolated)."""
+    graph = {("a",): set()}
+    assert _to_sorted_result(strongly_connected_components(graph)) == snapshot([[("a",)]])
+
+
+def test_scc_single_node_with_self_loop() -> None:
+    """Graph: a -> a."""
+    graph = {("a",): {("a",)}}
+    assert _to_sorted_result(strongly_connected_components(graph)) == snapshot([[("a",)]])
+
+
+def test_scc_bidirectional_edge_pair() -> None:
+    """Graph: a <-> b."""
+    graph = {
+        ("a",): {("b",)},
+        ("b",): {("a",)},
+    }
+    assert _to_sorted_result(strongly_connected_components(graph)) == snapshot([[("a",), ("b",)]])
+
+
+def test_scc_triangular_cycle() -> None:
+    """Graph: a -> b -> c -> a."""
+    graph = {
+        ("a",): {("b",)},
+        ("b",): {("c",)},
+        ("c",): {("a",)},
+    }
+    assert _to_sorted_result(strongly_connected_components(graph)) == snapshot([[("a",), ("b",), ("c",)]])
+
+
+def test_scc_linear_chain() -> None:
+    """Graph: a -> b -> c (acyclic)."""
+    graph = {
+        ("a",): {("b",)},
+        ("b",): {("c",)},
+        ("c",): set(),
+    }
+    assert _to_sorted_result(strongly_connected_components(graph)) == snapshot([[("c",)], [("b",)], [("a",)]])
+
+
+def test_scc_two_independent_cycles() -> None:
+    """Graph: a <-> b / x <-> y (disconnected)."""
+    graph = {
+        ("a",): {("b",)},
+        ("b",): {("a",)},
+        ("x",): {("y",)},
+        ("y",): {("x",)},
+    }
+    assert _to_sorted_result(strongly_connected_components(graph)) == snapshot([[("a",), ("b",)], [("x",), ("y",)]])
+
+
+def test_scc_edge_only_node() -> None:
+    """Graph: a -> b (b only referenced as edge target)."""
+    graph = {
+        ("a",): {("b",)},
+    }
+    assert _to_sorted_result(strongly_connected_components(graph)) == snapshot([[("b",)], [("a",)]])
+
+
+def test_scc_nested_cycle() -> None:
+    """Graph: a -> b,d / b <-> c / d (isolated)."""
+    graph = {
+        ("a",): {("b",), ("d",)},
+        ("b",): {("c",)},
+        ("c",): {("b",)},
+        ("d",): set(),
+    }
+    assert _to_sorted_result(strongly_connected_components(graph)) == snapshot([[("b",), ("c",)], [("d",)], [("a",)]])
+
+
+def test_scc_deterministic_results() -> None:
+    """Graph: z <-> y / a <-> b (verify determinism across 5 runs)."""
+    graph = {
+        ("z",): {("y",)},
+        ("y",): {("z",)},
+        ("a",): {("b",)},
+        ("b",): {("a",)},
+    }
+    results = [_to_sorted_result(strongly_connected_components(graph)) for _ in range(5)]
+    for i in range(1, 5):
+        assert results[i] == results[0]
+
+
+def test_scc_phase1_multiple_unvisited_neighbors() -> None:
+    """Graph: a -> b,c,d / b -> a / c -> a / d (isolated)."""
+    graph = {
+        ("a",): {("b",), ("c",), ("d",)},
+        ("b",): {("a",)},
+        ("c",): {("a",)},
+        ("d",): set(),
+    }
+    assert _to_sorted_result(strongly_connected_components(graph)) == snapshot([[("d",)], [("a",), ("b",), ("c",)]])
+
+
+def test_scc_phase1_on_stack_neighbor() -> None:
+    """Graph: a -> b -> c,d / c -> a / d -> b."""
+    graph = {
+        ("a",): {("b",)},
+        ("b",): {("c",), ("d",)},
+        ("c",): {("a",)},
+        ("d",): {("b",)},
+    }
+    assert _to_sorted_result(strongly_connected_components(graph)) == snapshot([[("a",), ("b",), ("c",), ("d",)]])
+
+
+def test_scc_deep_graph_iterative() -> None:
+    """100-node chain with terminal cycle n98 <-> n99."""
+    graph: dict[tuple[str, ...], set[tuple[str, ...]]] = {}
+    for i in range(99):
+        graph[(f"n{i}",)] = {(f"n{i + 1}",)}
+    graph[("n99",)] = {("n98",)}
+
+    result = strongly_connected_components(graph)
+    multi_node_sccs = [scc for scc in result if len(scc) > 1]
+    assert _to_sorted_result(multi_node_sccs) == snapshot([[("n98",), ("n99",)]])
+
+
+def test_scc_realistic_module_path_tuples() -> None:
+    """Graph: (pkg, __init__) <-> (pkg, issuing)."""
+    graph = {
+        ("pkg", "__init__"): {("pkg", "issuing")},
+        ("pkg", "issuing"): {("pkg", "__init__")},
+    }
+    assert _to_sorted_result(strongly_connected_components(graph)) == snapshot([
+        [("pkg", "__init__"), ("pkg", "issuing")]
+    ])
+
+
+def test_scc_phase0_skips_indexed_neighbors() -> None:
+    """Graph: a -> b,c / b -> c / c (isolated)."""
+    graph = {
+        ("a",): {("b",), ("c",)},
+        ("b",): {("c",)},
+        ("c",): set(),
+    }
+    assert _to_sorted_result(strongly_connected_components(graph)) == snapshot([[("c",)], [("b",)], [("a",)]])
+
+
+def test_scc_phase1_scc_root_detection() -> None:
+    """Graph: a -> b,c / b -> d / c -> d / d -> a."""
+    graph = {
+        ("a",): {("b",), ("c",)},
+        ("b",): {("d",)},
+        ("c",): {("d",)},
+        ("d",): {("a",)},
+    }
+    assert _to_sorted_result(strongly_connected_components(graph)) == snapshot([[("a",), ("b",), ("c",), ("d",)]])
+
+
+def test_scc_phase1_later_on_stack_neighbor() -> None:
+    """Graph: a -> b,c,d / b -> c / c -> a / d (isolated)."""
+    graph = {
+        ("a",): {("b",), ("c",), ("d",)},
+        ("b",): {("c",)},
+        ("c",): {("a",)},
+        ("d",): set(),
+    }
+    assert _to_sorted_result(strongly_connected_components(graph)) == snapshot([[("d",)], [("a",), ("b",), ("c",)]])
+
+
+def test_scc_phase0_visited_not_on_stack_neighbor() -> None:
+    """Graph: a -> x / b -> a,x / x (isolated)."""
+    graph = {
+        ("a",): {("x",)},
+        ("b",): {("a",), ("x",)},
+        ("x",): set(),
+    }
+    assert _to_sorted_result(strongly_connected_components(graph)) == snapshot([[("x",)], [("a",)], [("b",)]])
+
+
+def test_scc_phase0_exhausts_neighbors_finds_root() -> None:
+    """Graph: a -> b / b (isolated)."""
+    graph = {
+        ("a",): {("b",)},
+        ("b",): set(),
+    }
+    assert _to_sorted_result(strongly_connected_components(graph)) == snapshot([[("b",)], [("a",)]])
+
+
+def test_scc_multi_node_scc_pops_all_members() -> None:
+    """Graph: a -> b -> c -> d -> a (4-node cycle)."""
+    graph = {
+        ("a",): {("b",)},
+        ("b",): {("c",)},
+        ("c",): {("d",)},
+        ("d",): {("a",)},
+    }
+    assert _to_sorted_result(strongly_connected_components(graph)) == snapshot([[("a",), ("b",), ("c",), ("d",)]])
+
+
+def test_scc_phase1_extraction_with_multiple_pops() -> None:
+    """Graph: a -> b -> c -> d -> e -> a (5-node cycle via phase 1 return)."""
+    graph = {
+        ("a",): {("b",)},
+        ("b",): {("c",)},
+        ("c",): {("d",)},
+        ("d",): {("e",)},
+        ("e",): {("a",)},
+    }
+    assert _to_sorted_result(strongly_connected_components(graph)) == snapshot([
+        [("a",), ("b",), ("c",), ("d",), ("e",)]
+    ])
+
+
+def test_scc_phase1_multiple_returns_in_call_stack() -> None:
+    """Graph: a -> b,c / b -> d / c -> d / d -> e / e -> a."""
+    graph = {
+        ("a",): {("b",), ("c",)},
+        ("b",): {("d",)},
+        ("c",): {("d",)},
+        ("d",): {("e",)},
+        ("e",): {("a",)},
+    }
+    assert _to_sorted_result(strongly_connected_components(graph)) == snapshot([
+        [("a",), ("b",), ("c",), ("d",), ("e",)]
+    ])
+
+
+def test_circular_empty_graph() -> None:
+    """Empty graph."""
+    graph: dict[tuple[str, ...], set[tuple[str, ...]]] = {}
+    assert _to_sorted_result(find_circular_sccs(graph)) == snapshot([])
+
+
+def test_circular_acyclic_graph() -> None:
+    """Graph: a -> b -> c (acyclic)."""
+    graph = {
+        ("a",): {("b",)},
+        ("b",): {("c",)},
+        ("c",): set(),
+    }
+    assert _to_sorted_result(find_circular_sccs(graph)) == snapshot([])
+
+
+def test_circular_self_loop_detected() -> None:
+    """Graph: a -> a."""
+    graph = {("a",): {("a",)}}
+    assert _to_sorted_result(find_circular_sccs(graph)) == snapshot([[("a",)]])
+
+
+def test_circular_single_node_without_self_loop() -> None:
+    """Graph: a (isolated)."""
+    graph = {("a",): set()}
+    assert _to_sorted_result(find_circular_sccs(graph)) == snapshot([])
+
+
+def test_circular_bidirectional_pair_detected() -> None:
+    """Graph: a <-> b."""
+    graph = {
+        ("a",): {("b",)},
+        ("b",): {("a",)},
+    }
+    assert _to_sorted_result(find_circular_sccs(graph)) == snapshot([[("a",), ("b",)]])
+
+
+def test_circular_multiple_independent_cycles_detected() -> None:
+    """Graph: a <-> b / x <-> y (disconnected)."""
+    graph = {
+        ("a",): {("b",)},
+        ("b",): {("a",)},
+        ("x",): {("y",)},
+        ("y",): {("x",)},
+    }
+    assert _to_sorted_result(find_circular_sccs(graph)) == snapshot([[("a",), ("b",)], [("x",), ("y",)]])
+
+
+def test_circular_results_sorted_by_minimum_element() -> None:
+    """Graph: z <-> y / a <-> b (verify sorted by min element)."""
+    graph = {
+        ("z",): {("y",)},
+        ("y",): {("z",)},
+        ("a",): {("b",)},
+        ("b",): {("a",)},
+    }
+    result = find_circular_sccs(graph)
+    assert min(result[0]) < min(result[1])
+    assert _to_sorted_result(result) == snapshot([[("a",), ("b",)], [("y",), ("z",)]])
+
+
+def test_circular_filters_acyclic_sccs() -> None:
+    """Graph: a <-> b / c -> d (mixed cyclic and acyclic)."""
+    graph = {
+        ("a",): {("b",)},
+        ("b",): {("a",)},
+        ("c",): {("d",)},
+        ("d",): set(),
+    }
+    assert _to_sorted_result(find_circular_sccs(graph)) == snapshot([[("a",), ("b",)]])
+
+
+def test_circular_edge_only_node_not_circular() -> None:
+    """Graph: a -> b (b only referenced as edge)."""
+    graph = {
+        ("a",): {("b",)},
+    }
+    assert _to_sorted_result(find_circular_sccs(graph)) == snapshot([])
+
+
+def test_circular_stripe_api_like_pattern() -> None:
+    """Graph: () <-> (issuing,)."""
+    graph = {
+        (): {("issuing",)},
+        ("issuing",): {()},
+    }
+    assert _to_sorted_result(find_circular_sccs(graph)) == snapshot([[(), ("issuing",)]])
+
+
+def test_circular_triangular_cycle_with_external_edge() -> None:
+    """Graph: a -> b,x / b -> c / c -> a / x (isolated)."""
+    graph = {
+        ("a",): {("b",), ("x",)},
+        ("b",): {("c",)},
+        ("c",): {("a",)},
+        ("x",): set(),
+    }
+    assert _to_sorted_result(find_circular_sccs(graph)) == snapshot([[("a",), ("b",), ("c",)]])
+
+
+def test_circular_iteration_over_multiple_sccs() -> None:
+    """Graph: a <-> b / c (isolated) / d -> d / e -> f -> g -> e."""
+    graph = {
+        ("a",): {("b",)},
+        ("b",): {("a",)},
+        ("c",): set(),
+        ("d",): {("d",)},
+        ("e",): {("f",)},
+        ("f",): {("g",)},
+        ("g",): {("e",)},
+    }
+    result = find_circular_sccs(graph)
+    sizes = sorted([len(scc) for scc in result])
+    assert sizes == snapshot([1, 2, 3])
+    assert _to_sorted_result(result) == snapshot([[("a",), ("b",)], [("d",)], [("e",), ("f",), ("g",)]])
+
+
+def test_circular_many_single_node_sccs_with_self_loops() -> None:
+    """Graph: a -> a / b -> b / c -> c / d -> d (multiple self-loop SCCs)."""
+    graph = {
+        ("a",): {("a",)},
+        ("b",): {("b",)},
+        ("c",): {("c",)},
+        ("d",): {("d",)},
+    }
+    result = find_circular_sccs(graph)
+    assert len(result) == snapshot(4)
+    assert _to_sorted_result(result) == snapshot([[("a",)], [("b",)], [("c",)], [("d",)]])
+
+
+def test_circular_mixed_scc_sizes_iteration() -> None:
+    """Graph: a (isolated) / b -> b / c <-> d / e -> f -> g -> h -> e."""
+    graph = {
+        ("a",): set(),
+        ("b",): {("b",)},
+        ("c",): {("d",)},
+        ("d",): {("c",)},
+        ("e",): {("f",)},
+        ("f",): {("g",)},
+        ("g",): {("h",)},
+        ("h",): {("e",)},
+    }
+    result = find_circular_sccs(graph)
+    assert len(result) == snapshot(3)
+    sizes = sorted([len(scc) for scc in result])
+    assert sizes == snapshot([1, 2, 4])
diff -pruN 0.26.4-3/tests/root_id.json 0.45.0-1/tests/root_id.json
--- 0.26.4-3/tests/root_id.json	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/root_id.json	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,24 @@
+{
+  "$id": "https://example.com/root_id.json",
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "Person": {
+      "$ref": "person.json"
+    },
+    "OriginalPerson": {
+      "$ref": "person.json"
+    },
+    "Pet": {
+      "type": "object",
+      "properties": {
+        "name": {
+          "type": "string",
+          "examples": ["dog", "cat"]
+        },
+        "owner": {
+           "$ref": "https://example.com/person.json"
+        }
+      }
+    }
+  }
+}
\ No newline at end of file
diff -pruN 0.26.4-3/tests/test_format.py 0.45.0-1/tests/test_format.py
--- 0.26.4-3/tests/test_format.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/test_format.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,214 @@
+"""Tests for code formatting functionality."""
+
+from __future__ import annotations
+
+import sys
+from pathlib import Path
+from unittest import mock
+
+import pytest
+
+from datamodel_code_generator.format import CodeFormatter, Formatter, PythonVersion, PythonVersionMin
+
+EXAMPLE_LICENSE_FILE = str(Path(__file__).parent / "data/python/custom_formatters/license_example.txt")
+
+UN_EXIST_FORMATTER = "tests.data.python.custom_formatters.un_exist"
+WRONG_FORMATTER = "tests.data.python.custom_formatters.wrong"
+NOT_SUBCLASS_FORMATTER = "tests.data.python.custom_formatters.not_subclass"
+ADD_COMMENT_FORMATTER = "tests.data.python.custom_formatters.add_comment"
+ADD_LICENSE_FORMATTER = "tests.data.python.custom_formatters.add_license"
+
+
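+# A custom-formatter module must expose a class named ``CodeFormatter`` that
+# subclasses ``CustomCodeFormatter``; the NameError/TypeError tests below check
+# this contract. A minimal sketch of such a module (illustrative only):
+#
+#     from datamodel_code_generator.format import CustomCodeFormatter
+#
+#     class CodeFormatter(CustomCodeFormatter):
+#         def apply(self, code: str) -> str:
+#             return f"# a comment\n{code}"
+
+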
+def test_python_version() -> None:
+    """Ensure that the python version used for the tests is properly listed."""
+    _ = PythonVersion("{}.{}".format(*sys.version_info[:2]))
+
+
+def test_python_version_has_native_deferred_annotations() -> None:
+    """Test that has_native_deferred_annotations returns correct values for each Python version."""
+    assert not PythonVersion.PY_39.has_native_deferred_annotations
+    assert not PythonVersion.PY_310.has_native_deferred_annotations
+    assert not PythonVersion.PY_311.has_native_deferred_annotations
+    assert not PythonVersion.PY_312.has_native_deferred_annotations
+    assert not PythonVersion.PY_313.has_native_deferred_annotations
+    assert PythonVersion.PY_314.has_native_deferred_annotations
+
+
+@pytest.mark.parametrize(
+    ("skip_string_normalization", "expected_output"),
+    [
+        (True, "a = 'b'"),
+        (False, 'a = "b"'),
+    ],
+)
+def test_format_code_with_skip_string_normalization(
+    skip_string_normalization: bool,
+    expected_output: str,
+    tmp_path: Path,
+    monkeypatch: pytest.MonkeyPatch,
+) -> None:
+    """Test code formatting with skip string normalization option."""
+    monkeypatch.chdir(tmp_path)
+    formatter = CodeFormatter(PythonVersionMin, skip_string_normalization=skip_string_normalization)
+
+    formatted_code = formatter.format_code("a = 'b'")
+
+    assert formatted_code == expected_output + "\n"
+
+
+def test_format_code_un_exist_custom_formatter() -> None:
+    """Test error when custom formatter module doesn't exist."""
+    with pytest.raises(ModuleNotFoundError):
+        _ = CodeFormatter(
+            PythonVersionMin,
+            custom_formatters=[UN_EXIST_FORMATTER],
+        )
+
+
+def test_format_code_invalid_formatter_name() -> None:
+    """Test error when custom formatter has no CodeFormatter class."""
+    with pytest.raises(NameError):
+        _ = CodeFormatter(
+            PythonVersionMin,
+            custom_formatters=[WRONG_FORMATTER],
+        )
+
+
+def test_format_code_is_not_subclass() -> None:
+    """Test error when custom formatter doesn't inherit CustomCodeFormatter."""
+    with pytest.raises(TypeError):
+        _ = CodeFormatter(
+            PythonVersionMin,
+            custom_formatters=[NOT_SUBCLASS_FORMATTER],
+        )
+
+
+def test_format_code_with_custom_formatter_without_kwargs(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test custom formatter that doesn't require kwargs."""
+    monkeypatch.chdir(tmp_path)
+    formatter = CodeFormatter(
+        PythonVersionMin,
+        custom_formatters=[ADD_COMMENT_FORMATTER],
+    )
+
+    formatted_code = formatter.format_code("x = 1\ny = 2")
+
+    assert formatted_code == "# a comment\nx = 1\ny = 2" + "\n"
+
+
+def test_format_code_with_custom_formatter_with_kwargs(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test custom formatter with kwargs."""
+    monkeypatch.chdir(tmp_path)
+    formatter = CodeFormatter(
+        PythonVersionMin,
+        custom_formatters=[ADD_LICENSE_FORMATTER],
+        custom_formatters_kwargs={"license_file": EXAMPLE_LICENSE_FILE},
+    )
+
+    formatted_code = formatter.format_code("x = 1\ny = 2")
+
+    assert (
+        formatted_code
+        == """# MIT License
+#
+# Copyright (c) 2023 Blah-blah
+#
+x = 1
+y = 2
+"""
+    )
+
+
+def test_format_code_with_two_custom_formatters(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test chaining multiple custom formatters."""
+    monkeypatch.chdir(tmp_path)
+    formatter = CodeFormatter(
+        PythonVersionMin,
+        custom_formatters=[
+            ADD_COMMENT_FORMATTER,
+            ADD_LICENSE_FORMATTER,
+        ],
+        custom_formatters_kwargs={"license_file": EXAMPLE_LICENSE_FILE},
+    )
+
+    formatted_code = formatter.format_code("x = 1\ny = 2")
+
+    assert (
+        formatted_code
+        == """# MIT License
+#
+# Copyright (c) 2023 Blah-blah
+#
+# a comment
+x = 1
+y = 2
+"""
+    )
+
+
+def test_format_code_ruff_format_formatter(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test ruff format formatter."""
+    monkeypatch.chdir(tmp_path)
+    formatter = CodeFormatter(
+        PythonVersionMin,
+        formatters=[Formatter.RUFF_FORMAT],
+    )
+    with mock.patch("subprocess.run") as mock_run:
+        mock_run.return_value.stdout = b"output"
+        formatted_code = formatter.format_code("input")
+
+    assert formatted_code == "output"
+    mock_run.assert_called_once_with(
+        ("ruff", "format", "-"), input=b"input", capture_output=True, check=False, cwd=str(tmp_path)
+    )
+
+
+def test_format_code_ruff_check_formatter(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test ruff check formatter with auto-fix."""
+    monkeypatch.chdir(tmp_path)
+    formatter = CodeFormatter(
+        PythonVersionMin,
+        formatters=[Formatter.RUFF_CHECK],
+    )
+    with mock.patch("subprocess.run") as mock_run:
+        mock_run.return_value.stdout = b"output"
+        formatted_code = formatter.format_code("input")
+
+    assert formatted_code == "output"
+    mock_run.assert_called_once_with(
+        ("ruff", "check", "--fix", "-"), input=b"input", capture_output=True, check=False, cwd=str(tmp_path)
+    )
+
+
+def test_settings_path_with_existing_file(tmp_path: Path) -> None:
+    """Test settings_path with existing file uses parent directory."""
+    pyproject = tmp_path / "pyproject.toml"
+    pyproject.write_text("[tool.black]\nline-length = 60\n", encoding="utf-8")
+    existing_file = tmp_path / "existing.py"
+    existing_file.write_text("", encoding="utf-8")
+
+    formatter = CodeFormatter(PythonVersionMin, settings_path=existing_file)
+
+    assert formatter.settings_path == str(tmp_path)
+
+
+def test_settings_path_with_nonexistent_file(tmp_path: Path) -> None:
+    """Test settings_path with nonexistent file uses existing parent."""
+    pyproject = tmp_path / "pyproject.toml"
+    pyproject.write_text("[tool.black]\nline-length = 60\n", encoding="utf-8")
+    nonexistent_file = tmp_path / "nonexistent.py"
+
+    formatter = CodeFormatter(PythonVersionMin, settings_path=nonexistent_file)
+
+    assert formatter.settings_path == str(tmp_path)
+
+
+def test_settings_path_with_deeply_nested_nonexistent_path(tmp_path: Path) -> None:
+    """Test settings_path with deeply nested nonexistent path finds existing ancestor."""
+    pyproject = tmp_path / "pyproject.toml"
+    pyproject.write_text("[tool.black]\nline-length = 60\n", encoding="utf-8")
+    nested_path = tmp_path / "a" / "b" / "c" / "nonexistent.py"
+
+    formatter = CodeFormatter(PythonVersionMin, settings_path=nested_path)
+
+    assert formatter.settings_path == str(tmp_path)
diff -pruN 0.26.4-3/tests/test_imports.py 0.45.0-1/tests/test_imports.py
--- 0.26.4-3/tests/test_imports.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/test_imports.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,98 @@
+"""Tests for import management functionality."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import pytest
+
+from datamodel_code_generator.imports import Import, Imports
+
+if TYPE_CHECKING:
+    from collections.abc import Sequence
+
+
+@pytest.mark.parametrize(
+    ("inputs", "value"),
+    [
+        ([(None, "foo")], "import foo"),
+        ([(".", "foo")], "from . import foo"),
+        ([("bar", "foo")], "from bar import foo"),
+        ([("bar", "foo"), ("bar", "baz")], "from bar import baz, foo"),
+        ([("bar", "foo"), ("rab", "oof")], "from bar import foo\nfrom rab import oof"),
+        ([("bar", "foo"), ("bar", "foo")], "from bar import foo"),
+        ([(None, "foo.baz")], "import foo.baz"),
+    ],
+)
+def test_dump(inputs: Sequence[tuple[str | None, str]], value: str) -> None:
+    """Test creating import lines."""
+    imports = Imports()
+    imports.append(Import(from_=from_, import_=import_) for from_, import_ in inputs)
+
+    assert str(imports) == value
+
+
+def test_is_future_true() -> None:
+    """Test that __future__ imports are identified as future imports."""
+    import_ = Import(from_="__future__", import_="annotations")
+    assert import_.is_future is True
+
+
+def test_is_future_false_regular_import() -> None:
+    """Test that regular imports are not identified as future imports."""
+    import_ = Import(from_="typing", import_="Optional")
+    assert import_.is_future is False
+
+
+def test_is_future_false_no_from() -> None:
+    """Test that imports without from_ are not identified as future imports."""
+    import_ = Import(from_=None, import_="os")
+    assert import_.is_future is False
+
+
+def test_extract_future_with_future_imports() -> None:
+    """Test extracting future imports from mixed imports."""
+    imports = Imports()
+    imports.append(Import(from_="__future__", import_="annotations"))
+    imports.append(Import(from_="typing", import_="Optional"))
+
+    future = imports.extract_future()
+
+    assert str(future) == "from __future__ import annotations"
+    assert str(imports) == "from typing import Optional"
+    assert "__future__" not in imports
+
+
+def test_extract_future_no_future_imports() -> None:
+    """Test extracting from imports without future imports."""
+    imports = Imports()
+    imports.append(Import(from_="typing", import_="Optional"))
+
+    future = imports.extract_future()
+
+    assert not str(future)
+    assert str(imports) == "from typing import Optional"
+
+
+def test_extract_future_only_future_imports() -> None:
+    """Test extracting when only future imports exist."""
+    imports = Imports()
+    imports.append(Import(from_="__future__", import_="annotations"))
+
+    future = imports.extract_future()
+
+    assert str(future) == "from __future__ import annotations"
+    assert not str(imports)
+
+
+def test_extract_future_with_alias() -> None:
+    """Test extracting future imports with alias (edge case)."""
+    imports = Imports()
+    imports.append(Import(from_="__future__", import_="annotations", alias="ann"))
+    imports.append(Import(from_="typing", import_="Optional"))
+
+    future = imports.extract_future()
+
+    assert "annotations as ann" in str(future)
+    assert "__future__" not in imports
+    assert "__future__" not in imports.alias
diff -pruN 0.26.4-3/tests/test_infer_input_type.py 0.45.0-1/tests/test_infer_input_type.py
--- 0.26.4-3/tests/test_infer_input_type.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/test_infer_input_type.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,81 @@
+"""Tests for input type inference functionality."""
+
+from __future__ import annotations
+
+from pathlib import Path
+
+import pytest
+
+from datamodel_code_generator import Error, InputFileType, infer_input_type
+
+DATA_PATH: Path = Path(__file__).parent / "data"
+
+
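+# ``infer_input_type`` sniffs raw document text and returns an ``InputFileType``,
+# or raises ``Error`` when the format cannot be determined. A minimal sketch
+# (input is illustrative only):
+#
+#     infer_input_type('{"openapi": "3.0.0"}')  # -> InputFileType.OpenAPI
+
+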
+def test_infer_input_type() -> None:  # noqa: PLR0912
+    """Test automatic input type detection for various file formats."""
+
+    def assert_infer_input_type(file: Path, raw_data_type: InputFileType) -> None:
+        __tracebackhide__ = True
+        if file.is_dir():
+            return
+        if file.suffix not in {".yaml", ".json"}:
+            return
+        result = infer_input_type(file.read_text(encoding="utf-8"))
+        assert result == raw_data_type, f"{file} was the wrong type!"
+
+    def assert_invalid_infer_input_type(file: Path) -> None:
+        with pytest.raises(
+            Error,
+            match=(
+                r"Can't infer input file type from the input data. "
+                r"Please specify the input file type explicitly with --input-file-type option."
+            ),
+        ):
+            infer_input_type(file.read_text(encoding="utf-8"))
+
+    for file in (DATA_PATH / "csv").rglob("*"):
+        assert_infer_input_type(file, InputFileType.CSV)
+
+    for file in (DATA_PATH / "json").rglob("*"):
+        if file.name.endswith("broken.json"):
+            continue
+        assert_infer_input_type(file, InputFileType.Json)
+    for file in (DATA_PATH / "jsonschema").rglob("*"):
+        if file.name.endswith((
+            "external_child.json",
+            "external_child.yaml",
+            "extra_data_msgspec.json",
+        )):
+            continue
+        assert_infer_input_type(file, InputFileType.JsonSchema)
+    for file in (DATA_PATH / "openapi").rglob("*"):
+        if "all_of_with_relative_ref" in file.parts:
+            continue
+        if "reference_same_hierarchy_directory" in file.parts:
+            continue
+        if "external_ref_with_transitive_local_ref" in file.parts and file.name != "openapi.yaml":
+            continue
+        if "paths_external_ref" in file.parts and file.name != "openapi.yaml":
+            continue
+        if "paths_ref_with_external_schema" in file.parts and file.name != "openapi.yaml":
+            continue
+        if "webhooks_ref_with_external_schema" in file.parts and file.name != "openapi.yaml":
+            continue
+        if file.name.endswith((
+            "aliases.json",
+            "extra_data.json",
+            "extra_data_msgspec.json",
+            "invalid.yaml",
+            "list.json",
+            "empty_data.json",
+            "root_model.yaml",
+            "json_pointer.yaml",
+            "const.json",
+            "array_called_fields_with_oneOf_items.yaml",
+        )):
+            continue
+
+        if file.name.endswith("not.json"):
+            assert_invalid_infer_input_type(file)
+            continue
+        assert_infer_input_type(file, InputFileType.OpenAPI)
diff -pruN 0.26.4-3/tests/test_main_kr.py 0.45.0-1/tests/test_main_kr.py
--- 0.26.4-3/tests/test_main_kr.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/test_main_kr.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,1558 @@
+"""Tests for main CLI functionality with Korean locale settings."""
+
+from __future__ import annotations
+
+from argparse import Namespace
+from pathlib import Path
+from typing import TYPE_CHECKING
+from unittest.mock import Mock, patch
+
+import black
+import pydantic
+import pytest
+from packaging import version
+
+from datamodel_code_generator import MIN_VERSION, chdir, inferred_message
+from datamodel_code_generator.__main__ import Exit, main
+from datamodel_code_generator.arguments import arg_parser
+from tests.conftest import create_assert_file_content, freeze_time
+from tests.main.conftest import run_main_and_assert, run_main_with_args
+
+if TYPE_CHECKING:
+    from pytest_mock import MockerFixture
+
+DATA_PATH: Path = Path(__file__).parent / "data"
+OPEN_API_DATA_PATH: Path = DATA_PATH / "openapi"
+JSON_SCHEMA_DATA_PATH: Path = DATA_PATH / "jsonschema"
+EXPECTED_MAIN_KR_PATH = DATA_PATH / "expected" / "main_kr"
+
+assert_file_content = create_assert_file_content(EXPECTED_MAIN_KR_PATH)
+
+
+TIMESTAMP = "1985-10-26T01:21:00-07:00"
+
+
+@pytest.fixture(autouse=True)
+def reset_namespace(monkeypatch: pytest.MonkeyPatch) -> None:
+    """Reset argument namespace before each test."""
+    namespace_ = Namespace(no_color=False)
+    monkeypatch.setattr("datamodel_code_generator.__main__.namespace", namespace_)
+    monkeypatch.setattr("datamodel_code_generator.arguments.namespace", namespace_)
+
+
+@pytest.fixture
+def output_file(tmp_path: Path) -> Path:
+    """Return standard output file path."""
+    return tmp_path / "output.py"
+
+
+@pytest.fixture
+def output_dir(tmp_path: Path) -> Path:
+    """Return standard output directory path."""
+    return tmp_path / "model"
+
+
+@freeze_time("2019-07-26")
+def test_main(output_file: Path) -> None:
+    """Test basic main function with OpenAPI input."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="main/output.py",
+    )
+
+
+@freeze_time("2019-07-26")
+def test_main_base_class(output_file: Path, tmp_path: Path) -> None:
+    """Test main function with custom base class."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_MAIN_KR_PATH / "main_base_class" / "output.py",
+        extra_args=["--base-class", "custom_module.Base"],
+        copy_files=[(DATA_PATH / "pyproject.toml", tmp_path / "pyproject.toml")],
+    )
+
+
+@freeze_time("2019-07-26")
+def test_target_python_version(output_file: Path) -> None:
+    """Test main function with target Python version."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_MAIN_KR_PATH / "target_python_version" / "output.py",
+        extra_args=["--target-python-version", f"3.{MIN_VERSION}"],
+    )
+
+
+def test_main_modular(output_dir: Path) -> None:
+    """Test main function on modular file."""
+    with freeze_time(TIMESTAMP):
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "modular.yaml",
+            output_path=output_dir,
+            expected_directory=EXPECTED_MAIN_KR_PATH / "main_modular",
+        )
+
+
+def test_main_modular_no_file() -> None:
+    """Test main function on modular file with no output name."""
+    run_main_with_args(["--input", str(OPEN_API_DATA_PATH / "modular.yaml")], expected_exit=Exit.ERROR)
+
+
+def test_main_modular_filename(output_file: Path) -> None:
+    """Test main function on modular file with filename."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "modular.yaml",
+        output_path=output_file,
+        expected_exit=Exit.ERROR,
+    )
+
+
+def test_main_no_file(capsys: pytest.CaptureFixture[str], tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+    """Test main function on non-modular file with no output name."""
+    monkeypatch.chdir(tmp_path)
+
+    with freeze_time(TIMESTAMP):
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "api.yaml",
+            output_path=None,
+            expected_stdout_path=EXPECTED_MAIN_KR_PATH / "main_no_file" / "output.py",
+            capsys=capsys,
+            expected_stderr=inferred_message.format("openapi") + "\n",
+        )
+
+
+def test_main_custom_template_dir(
+    capsys: pytest.CaptureFixture[str], tmp_path: Path, monkeypatch: pytest.MonkeyPatch
+) -> None:
+    """Test main function with custom template directory."""
+    monkeypatch.chdir(tmp_path)
+
+    custom_template_dir = DATA_PATH / "templates"
+    extra_template_data = OPEN_API_DATA_PATH / "extra_data.json"
+
+    with freeze_time(TIMESTAMP):
+        run_main_and_assert(
+            input_path=OPEN_API_DATA_PATH / "api.yaml",
+            output_path=None,
+            expected_stdout_path=EXPECTED_MAIN_KR_PATH / "main_custom_template_dir" / "output.py",
+            capsys=capsys,
+            extra_args=[
+                "--custom-template-dir",
+                str(custom_template_dir),
+                "--extra-template-data",
+                str(extra_template_data),
+            ],
+            expected_stderr=inferred_message.format("openapi") + "\n",
+        )
+
+
+@pytest.mark.skipif(
+    int(black.__version__.split(".")[0]) >= 24,
+    reason="Installed black doesn't support the old style",
+)
+@freeze_time("2019-07-26")
+def test_pyproject(output_file: Path, tmp_path: Path) -> None:
+    """Test main function with pyproject.toml configuration."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file="pyproject/output.py",
+        copy_files=[(DATA_PATH / "project" / "pyproject.toml", tmp_path / "pyproject.toml")],
+    )
+
+
+@pytest.mark.parametrize("language", ["UK", "US"])
+def test_pyproject_respects_both_spellings_of_capitalize_enum_members_flag(language: str, tmp_path: Path) -> None:
+    """Test that both UK and US spellings of capitalise are accepted."""
+    pyproject_toml_data = f"""
+[tool.datamodel-codegen]
+capitali{"s" if language == "UK" else "z"}e-enum-members = true
+enable-version-header = false
+input-file-type = "jsonschema"
+"""
+    with (tmp_path / "pyproject.toml").open("w") as f:
+        f.write(pyproject_toml_data)
+
+    input_data = """
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "definitions": {
+    "MyEnum": {
+      "enum": [
+        "MEMBER_1",
+        "member_2"
+      ]
+    }
+  }
+}
+"""
+    input_file = tmp_path / "schema.json"
+    with input_file.open("w") as f:
+        f.write(input_data)
+
+    expected_output = """# generated by datamodel-codegen:
+#   filename:  schema.json
+
+from __future__ import annotations
+
+from enum import Enum
+from typing import Any
+
+from pydantic import BaseModel
+
+
+class Model(BaseModel):
+    __root__: Any
+
+
+class MyEnum(Enum):
+    MEMBER_1 = 'MEMBER_1'
+    member_2 = 'member_2'
+"""
+
+    output_file: Path = tmp_path / "output.py"
+    run_main_and_assert(
+        input_path=input_file,
+        output_path=output_file,
+        expected_output=expected_output,
+        extra_args=["--disable-timestamp"],
+    )
+
+
+@pytest.mark.skipif(
+    black.__version__.split(".")[0] == "19",
+    reason="Installed black doesn't support the old style",
+)
+@freeze_time("2019-07-26")
+def test_pyproject_with_tool_section(output_file: Path, tmp_path: Path) -> None:
+    """Test that a pyproject.toml with [tool.datamodel-codegen] section is found and applied."""
+    pyproject_toml = """
+[tool.datamodel-codegen]
+target-python-version = "3.10"
+strict-types = ["str"]
+"""
+    (tmp_path / "pyproject.toml").write_text(pyproject_toml)
+
+    with chdir(tmp_path):
+        run_main_and_assert(
+            input_path=(OPEN_API_DATA_PATH / "api.yaml").resolve(),
+            output_path=output_file.resolve(),
+            input_file_type=None,
+            assert_func=assert_file_content,
+            expected_file=EXPECTED_MAIN_KR_PATH / "pyproject" / "output.strictstr.py",
+        )
+
+
+@pytest.mark.cli_doc(
+    options=["--use-schema-description"],
+    input_schema="openapi/api_multiline_docstrings.yaml",
+    cli_args=["--use-schema-description"],
+    golden_output="main_kr/main_use_schema_description/output.py",
+    related_options=["--use-field-description", "--use-inline-field-description"],
+)
+@freeze_time("2019-07-26")
+def test_main_use_schema_description(output_file: Path) -> None:
+    """Use schema description as class docstring.
+
+    The `--use-schema-description` flag extracts the `description` property from
+    schema definitions and adds it as a docstring to the generated class. This is
+    useful for preserving documentation from your schema in the generated code.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api_multiline_docstrings.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_MAIN_KR_PATH / "main_use_schema_description" / "output.py",
+        extra_args=["--use-schema-description"],
+    )
+
+
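+# For illustration only (the golden file above is authoritative): a schema such
+# as ``Pet`` carrying ``description: A pet`` is rendered roughly as
+#
+#     class Pet(BaseModel):
+#         """A pet"""
+
+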
+@pytest.mark.cli_doc(
+    options=["--use-field-description"],
+    input_schema="openapi/api_multiline_docstrings.yaml",
+    cli_args=["--use-field-description"],
+    golden_output="main_kr/main_use_field_description/output.py",
+    related_options=["--use-schema-description", "--use-inline-field-description"],
+)
+@freeze_time("2022-11-11")
+def test_main_use_field_description(output_file: Path) -> None:
+    """Add field descriptions using Pydantic Field().
+
+    The `--use-field-description` flag adds the `description` property from
+    schema fields as the `description` parameter in Pydantic Field(). This
+    provides documentation that is accessible via model schema and OpenAPI docs.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api_multiline_docstrings.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_MAIN_KR_PATH / "main_use_field_description" / "output.py",
+        extra_args=["--use-field-description"],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--use-inline-field-description"],
+    input_schema="openapi/api_multiline_docstrings.yaml",
+    cli_args=["--use-inline-field-description"],
+    golden_output="main_kr/main_use_inline_field_description/output.py",
+    related_options=["--use-field-description", "--use-schema-description"],
+)
+@freeze_time("2022-11-11")
+def test_main_use_inline_field_description(output_file: Path) -> None:
+    """Add field descriptions as inline comments.
+
+    The `--use-inline-field-description` flag adds the `description` property from
+    schema fields as inline comments after each field definition. This provides
+    documentation without using Field() wrappers.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "api_multiline_docstrings.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_MAIN_KR_PATH / "main_use_inline_field_description" / "output.py",
+        extra_args=["--use-inline-field-description"],
+    )
+
+
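+# The two field-level flags above differ only in where the description lands;
+# sketched for a field carrying ``description: The pet name`` (illustrative,
+# not the golden output):
+#
+#     --use-field-description:        name: Optional[str] = Field(None, description='The pet name')
+#     --use-inline-field-description: name: Optional[str] = None  # The pet name
+
+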
+def test_capitalise_enum_members(tmp_path: Path) -> None:
+    """Test capitalise-enum-members option (issue #2370)."""
+    input_data = """
+openapi: 3.0.3
+info:
+  version: X.Y.Z
+  title: example schema
+servers:
+  - url: "https://acme.org"
+paths: {}
+components:
+  schemas:
+    EnumSystems:
+      type: enum
+      enum:
+        - linux
+        - osx
+        - windows
+"""
+    input_file = tmp_path / "myschema.yaml"
+    input_file.write_text(input_data, encoding="utf-8")
+
+    expected_output = """# generated by datamodel-codegen:
+#   filename:  myschema.yaml
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class EnumSystems(Enum):
+    LINUX = 'linux'
+    OSX = 'osx'
+    WINDOWS = 'windows'
+"""
+
+    output_file: Path = tmp_path / "output.py"
+    run_main_and_assert(
+        input_path=input_file,
+        output_path=output_file,
+        expected_output=expected_output,
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--disable-timestamp",
+            "--capitalise-enum-members",
+            "--snake-case-field",
+        ],
+    )
+
+
+def test_capitalise_enum_members_and_use_subclass_enum(tmp_path: Path) -> None:
+    """Test combination of capitalise-enum-members and use-subclass-enum (issue #2395)."""
+    input_data = """
+openapi: 3.0.3
+info:
+  version: X.Y.Z
+  title: example schema
+servers:
+  - url: "https://acme.org"
+paths: {}
+components:
+  schemas:
+    EnumSystems:
+      type: string
+      enum:
+        - linux
+        - osx
+        - windows
+"""
+    input_file = tmp_path / "myschema.yaml"
+    input_file.write_text(input_data, encoding="utf-8")
+
+    expected_output = """# generated by datamodel-codegen:
+#   filename:  myschema.yaml
+
+from __future__ import annotations
+
+from enum import Enum
+
+
+class EnumSystems(str, Enum):
+    LINUX = 'linux'
+    OSX = 'osx'
+    WINDOWS = 'windows'
+"""
+
+    output_file: Path = tmp_path / "output.py"
+    run_main_and_assert(
+        input_path=input_file,
+        output_path=output_file,
+        expected_output=expected_output,
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--disable-timestamp",
+            "--capitalise-enum-members",
+            "--snake-case-field",
+            "--use-subclass-enum",
+        ],
+    )
+
+
+EXPECTED_GENERATE_PYPROJECT_CONFIG_PATH = EXPECTED_MAIN_KR_PATH / "generate_pyproject_config"
+
+
+@pytest.mark.cli_doc(
+    options=["--generate-pyproject-config"],
+    cli_args=["--generate-pyproject-config", "--input", "schema.yaml", "--output", "model.py"],
+    expected_stdout="main_kr/generate_pyproject_config/basic.txt",
+)
+def test_generate_pyproject_config_basic(capsys: pytest.CaptureFixture[str]) -> None:
+    """Generate pyproject.toml configuration from CLI arguments.
+
+    The `--generate-pyproject-config` flag outputs a pyproject.toml configuration
+    snippet based on the provided CLI arguments. This is useful for converting
+    a working CLI command into a reusable configuration file.
+    """
+    run_main_with_args(
+        [
+            "--generate-pyproject-config",
+            "--input",
+            "schema.yaml",
+            "--output",
+            "model.py",
+        ],
+        capsys=capsys,
+        expected_stdout_path=EXPECTED_GENERATE_PYPROJECT_CONFIG_PATH / "basic.txt",
+    )
+
+
+def test_generate_pyproject_config_with_boolean_options(capsys: pytest.CaptureFixture[str]) -> None:
+    """Test --generate-pyproject-config with boolean options."""
+    run_main_with_args(
+        [
+            "--generate-pyproject-config",
+            "--snake-case-field",
+            "--use-annotated",
+            "--collapse-root-models",
+        ],
+        capsys=capsys,
+        expected_stdout_path=EXPECTED_GENERATE_PYPROJECT_CONFIG_PATH / "boolean_options.txt",
+    )
+
+
+def test_generate_pyproject_config_with_list_options(capsys: pytest.CaptureFixture[str]) -> None:
+    """Test --generate-pyproject-config with list options."""
+    run_main_with_args(
+        [
+            "--generate-pyproject-config",
+            "--strict-types",
+            "str",
+            "int",
+        ],
+        capsys=capsys,
+        expected_stdout_path=EXPECTED_GENERATE_PYPROJECT_CONFIG_PATH / "list_options.txt",
+    )
+
+
+def test_generate_pyproject_config_with_multiple_options(capsys: pytest.CaptureFixture[str]) -> None:
+    """Test --generate-pyproject-config with various option types."""
+    run_main_with_args(
+        [
+            "--generate-pyproject-config",
+            "--input",
+            "schema.yaml",
+            "--output",
+            "model.py",
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--target-python-version",
+            "3.11",
+            "--snake-case-field",
+            "--strict-types",
+            "str",
+            "bytes",
+        ],
+        capsys=capsys,
+        expected_stdout_path=EXPECTED_GENERATE_PYPROJECT_CONFIG_PATH / "multiple_options.txt",
+    )
+
+
+def test_generate_pyproject_config_excludes_meta_options(capsys: pytest.CaptureFixture[str]) -> None:
+    """Test that meta options are excluded from generated config."""
+    run_main_with_args(
+        [
+            "--generate-pyproject-config",
+            "--input",
+            "schema.yaml",
+        ],
+        capsys=capsys,
+        expected_stdout_path=EXPECTED_GENERATE_PYPROJECT_CONFIG_PATH / "excludes_meta_options.txt",
+    )
+
+
+def test_generate_pyproject_config_with_enum_option(capsys: pytest.CaptureFixture[str]) -> None:
+    """Test --generate-pyproject-config with Enum option."""
+    run_main_with_args(
+        [
+            "--generate-pyproject-config",
+            "--input",
+            "schema.yaml",
+            "--read-only-write-only-model-type",
+            "all",
+        ],
+        capsys=capsys,
+        expected_stdout_path=EXPECTED_GENERATE_PYPROJECT_CONFIG_PATH / "enum_option.txt",
+    )
+
+
+EXPECTED_GENERATE_CLI_COMMAND_PATH = EXPECTED_MAIN_KR_PATH / "generate_cli_command"
+
+
+@pytest.mark.cli_doc(
+    options=["--generate-cli-command"],
+    cli_args=["--generate-cli-command"],
+    config_content="""[tool.datamodel-codegen]
+input = "schema.yaml"
+output = "model.py"
+""",
+    expected_stdout="main_kr/generate_cli_command/basic.txt",
+)
+def test_generate_cli_command_basic(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> None:
+    """Generate CLI command from pyproject.toml configuration.
+
+    The `--generate-cli-command` flag reads your pyproject.toml configuration
+    and outputs the equivalent CLI command. This is useful for debugging
+    configuration issues or sharing commands with others.
+    """
+    pyproject_toml = """
+[tool.datamodel-codegen]
+input = "schema.yaml"
+output = "model.py"
+"""
+    (tmp_path / "pyproject.toml").write_text(pyproject_toml)
+
+    with chdir(tmp_path):
+        run_main_with_args(
+            ["--generate-cli-command"],
+            capsys=capsys,
+            expected_stdout_path=EXPECTED_GENERATE_CLI_COMMAND_PATH / "basic.txt",
+        )
+
+
+def test_generate_cli_command_with_boolean_options(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> None:
+    """Test --generate-cli-command with boolean options."""
+    pyproject_toml = """
+[tool.datamodel-codegen]
+snake-case-field = true
+use-annotated = true
+collapse-root-models = true
+"""
+    (tmp_path / "pyproject.toml").write_text(pyproject_toml)
+
+    with chdir(tmp_path):
+        run_main_with_args(
+            ["--generate-cli-command"],
+            capsys=capsys,
+            expected_stdout_path=EXPECTED_GENERATE_CLI_COMMAND_PATH / "boolean_options.txt",
+        )
+
+
+def test_generate_cli_command_with_list_options(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> None:
+    """Test --generate-cli-command with list options."""
+    pyproject_toml = """
+[tool.datamodel-codegen]
+strict-types = ["str", "int"]
+"""
+    (tmp_path / "pyproject.toml").write_text(pyproject_toml)
+
+    with chdir(tmp_path):
+        run_main_with_args(
+            ["--generate-cli-command"],
+            capsys=capsys,
+            expected_stdout_path=EXPECTED_GENERATE_CLI_COMMAND_PATH / "list_options.txt",
+        )
+
+
+def test_generate_cli_command_with_multiple_options(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> None:
+    """Test --generate-cli-command with various option types."""
+    pyproject_toml = """
+[tool.datamodel-codegen]
+input = "schema.yaml"
+output = "model.py"
+output-model-type = "pydantic_v2.BaseModel"
+target-python-version = "3.11"
+snake-case-field = true
+strict-types = ["str", "bytes"]
+"""
+    (tmp_path / "pyproject.toml").write_text(pyproject_toml)
+
+    with chdir(tmp_path):
+        run_main_with_args(
+            ["--generate-cli-command"],
+            capsys=capsys,
+            expected_stdout_path=EXPECTED_GENERATE_CLI_COMMAND_PATH / "multiple_options.txt",
+        )
+
+
+def test_generate_cli_command_no_config(tmp_path: Path) -> None:
+    """Test --generate-cli-command when no config found."""
+    with chdir(tmp_path):
+        run_main_with_args(
+            ["--generate-cli-command"],
+            expected_exit=Exit.ERROR,
+        )
+
+
+def test_generate_cli_command_with_no_use_specialized_enum(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> None:
+    """Test --generate-cli-command with use-specialized-enum = false."""
+    pyproject_toml = """
+[tool.datamodel-codegen]
+input = "schema.yaml"
+use-specialized-enum = false
+"""
+    (tmp_path / "pyproject.toml").write_text(pyproject_toml)
+
+    with chdir(tmp_path):
+        run_main_with_args(
+            ["--generate-cli-command"],
+            capsys=capsys,
+            expected_stdout_path=EXPECTED_GENERATE_CLI_COMMAND_PATH / "no_use_specialized_enum.txt",
+        )
+
+
+def test_generate_cli_command_with_spaces_in_values(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> None:
+    """Test --generate-cli-command with spaces in values."""
+    pyproject_toml = """
+[tool.datamodel-codegen]
+input = "my schema.yaml"
+output = "my model.py"
+http-headers = ["Authorization: Bearer token", "X-Custom: value"]
+"""
+    (tmp_path / "pyproject.toml").write_text(pyproject_toml)
+
+    with chdir(tmp_path):
+        run_main_with_args(
+            ["--generate-cli-command"],
+            capsys=capsys,
+            expected_stdout_path=EXPECTED_GENERATE_CLI_COMMAND_PATH / "spaces_in_values.txt",
+        )
+
+
+def test_generate_cli_command_with_false_boolean(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> None:
+    """Test --generate-cli-command with regular boolean set to false (should be skipped)."""
+    pyproject_toml = """
+[tool.datamodel-codegen]
+input = "schema.yaml"
+snake-case-field = false
+"""
+    (tmp_path / "pyproject.toml").write_text(pyproject_toml)
+
+    with chdir(tmp_path):
+        run_main_with_args(
+            ["--generate-cli-command"],
+            capsys=capsys,
+            expected_stdout_path=EXPECTED_GENERATE_CLI_COMMAND_PATH / "false_boolean.txt",
+        )
+
+
+def test_generate_cli_command_excludes_excluded_options(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> None:
+    """Test --generate-cli-command excludes options like debug, version, etc."""
+    pyproject_toml = """
+[tool.datamodel-codegen]
+input = "schema.yaml"
+debug = true
+version = true
+no-color = true
+disable-warnings = true
+"""
+    (tmp_path / "pyproject.toml").write_text(pyproject_toml)
+
+    with chdir(tmp_path):
+        run_main_with_args(
+            ["--generate-cli-command"],
+            capsys=capsys,
+            expected_stdout_path=EXPECTED_GENERATE_CLI_COMMAND_PATH / "excluded_options.txt",
+        )
+
+
+EXPECTED_PYPROJECT_PROFILE_PATH = EXPECTED_MAIN_KR_PATH / "pyproject_profile"
+
+
+@pytest.mark.skipif(
+    version.parse(black.__version__) < version.parse("23.0.0"),
+    reason="black 22.x doesn't support Python 3.11 target version",
+)
+@freeze_time("2019-07-26")
+def test_pyproject_with_profile(output_file: Path, tmp_path: Path) -> None:
+    """Test loading a named profile from pyproject.toml."""
+    pyproject_toml = """
+[tool.datamodel-codegen]
+target-python-version = "3.9"
+enable-version-header = false
+
+[tool.datamodel-codegen.profiles.api]
+target-python-version = "3.11"
+snake-case-field = true
+"""
+    (tmp_path / "pyproject.toml").write_text(pyproject_toml)
+
+    input_data = """
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "properties": {
+    "firstName": {"type": "string"},
+    "lastName": {"type": "string"}
+  }
+}
+"""
+    input_file = tmp_path / "schema.json"
+    input_file.write_text(input_data)
+
+    with chdir(tmp_path):
+        run_main_and_assert(
+            input_path=input_file,
+            output_path=output_file.resolve(),
+            assert_func=assert_file_content,
+            expected_file=EXPECTED_PYPROJECT_PROFILE_PATH / "with_profile.py",
+            extra_args=["--profile", "api", "--disable-timestamp"],
+        )
+
+
+def test_pyproject_profile_not_found(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> None:
+    """Test error when profile is not found."""
+    pyproject_toml = """
+[tool.datamodel-codegen]
+target-python-version = "3.9"
+"""
+    (tmp_path / "pyproject.toml").write_text(pyproject_toml)
+
+    input_file = tmp_path / "schema.json"
+    input_file.write_text('{"type": "object"}')
+
+    output_file = tmp_path / "output.py"
+
+    with chdir(tmp_path):
+        return_code = run_main_with_args(
+            ["--input", str(input_file), "--output", str(output_file), "--profile", "nonexistent"],
+            expected_exit=Exit.ERROR,
+            capsys=capsys,
+        )
+        assert return_code == Exit.ERROR
+        captured = capsys.readouterr()
+        assert "Profile 'nonexistent' not found in pyproject.toml" in captured.err
+
+
+@freeze_time("2019-07-26")
+def test_ignore_pyproject_option(output_file: Path, tmp_path: Path) -> None:
+    """Test --ignore-pyproject ignores pyproject.toml configuration."""
+    pyproject_toml = """
+[tool.datamodel-codegen]
+snake-case-field = true
+enable-version-header = true
+"""
+    (tmp_path / "pyproject.toml").write_text(pyproject_toml)
+
+    input_data = """
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "properties": {
+    "firstName": {"type": "string"},
+    "lastName": {"type": "string"}
+  }
+}
+"""
+    input_file = tmp_path / "schema.json"
+    input_file.write_text(input_data)
+
+    with chdir(tmp_path):
+        run_main_and_assert(
+            input_path=input_file,
+            output_path=output_file.resolve(),
+            assert_func=assert_file_content,
+            expected_file=EXPECTED_PYPROJECT_PROFILE_PATH / "ignore_pyproject.py",
+            extra_args=["--ignore-pyproject", "--disable-timestamp"],
+        )
+
+
+@freeze_time("2019-07-26")
+def test_profile_overrides_base_config_shallow_merge(output_file: Path, tmp_path: Path) -> None:
+    """Test that profile settings shallow-merge (replace) base settings for lists."""
+    pyproject_toml = """
+[tool.datamodel-codegen]
+strict-types = ["str", "int"]
+target-python-version = "3.9"
+enable-version-header = false
+
+[tool.datamodel-codegen.profiles.api]
+strict-types = ["bytes"]
+"""
+    (tmp_path / "pyproject.toml").write_text(pyproject_toml)
+
+    input_data = """
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "properties": {
+    "data": {"type": "string", "format": "binary"}
+  }
+}
+"""
+    input_file = tmp_path / "schema.json"
+    input_file.write_text(input_data)
+
+    with chdir(tmp_path):
+        run_main_and_assert(
+            input_path=input_file,
+            output_path=output_file.resolve(),
+            assert_func=assert_file_content,
+            expected_file=EXPECTED_PYPROJECT_PROFILE_PATH / "shallow_merge.py",
+            extra_args=["--profile", "api", "--disable-timestamp"],
+        )
+
+
+def test_generate_cli_command_with_profile(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> None:
+    """Test --generate-cli-command reflects merged profile settings."""
+    pyproject_toml = """
+[tool.datamodel-codegen]
+target-python-version = "3.9"
+snake-case-field = true
+
+[tool.datamodel-codegen.profiles.api]
+input = "api.yaml"
+target-python-version = "3.11"
+"""
+    (tmp_path / "pyproject.toml").write_text(pyproject_toml)
+
+    with chdir(tmp_path):
+        run_main_with_args(
+            ["--profile", "api", "--generate-cli-command"],
+            capsys=capsys,
+        )
+        captured = capsys.readouterr()
+        # Profile value should override base
+        assert "--target-python-version 3.11" in captured.out
+        # Base value should be inherited
+        assert "--snake-case-field" in captured.out
+        # Profile-specific value (no quotes when no spaces in value)
+        assert "--input api.yaml" in captured.out
+
+
+def test_help_shows_new_options() -> None:
+    """Test that --profile and --ignore-pyproject appear in help."""
+    help_text = arg_parser.format_help()
+    assert "--profile" in help_text
+    assert "--ignore-pyproject" in help_text
+    assert "pyproject.toml" in help_text
+
+
+@pytest.mark.skipif(
+    version.parse(black.__version__) < version.parse("23.0.0"),
+    reason="black 22.x doesn't support Python 3.11 target version",
+)
+def test_pyproject_profile_inherits_base_settings(output_file: Path, tmp_path: Path) -> None:
+    """Test that profile inherits settings from base config."""
+    pyproject_toml = """
+[tool.datamodel-codegen]
+snake-case-field = true
+enable-version-header = false
+
+[tool.datamodel-codegen.profiles.api]
+target-python-version = "3.11"
+"""
+    (tmp_path / "pyproject.toml").write_text(pyproject_toml)
+
+    input_data = """
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "properties": {
+    "firstName": {"type": "string"}
+  }
+}
+"""
+    input_file = tmp_path / "schema.json"
+    input_file.write_text(input_data)
+
+    with chdir(tmp_path):
+        run_main_and_assert(
+            input_path=input_file,
+            output_path=output_file.resolve(),
+            assert_func=assert_file_content,
+            expected_file=EXPECTED_PYPROJECT_PROFILE_PATH / "inherits_base.py",
+            extra_args=["--profile", "api", "--disable-timestamp"],
+        )
+
+
+@pytest.mark.skipif(
+    version.parse(black.__version__) < version.parse("23.0.0"),
+    reason="black 22.x doesn't support Python 3.11 target version",
+)
+@freeze_time("2019-07-26")
+def test_cli_args_override_profile_and_base(output_file: Path, tmp_path: Path) -> None:
+    """Test that CLI arguments take precedence over profile and base settings."""
+    pyproject_toml = """
+[tool.datamodel-codegen]
+target-python-version = "3.9"
+enable-version-header = false
+
+[tool.datamodel-codegen.profiles.api]
+target-python-version = "3.10"
+"""
+    (tmp_path / "pyproject.toml").write_text(pyproject_toml)
+
+    input_data = """
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "properties": {
+    "firstName": {"type": "string"}
+  }
+}
+"""
+    input_file = tmp_path / "schema.json"
+    input_file.write_text(input_data)
+
+    with chdir(tmp_path):
+        run_main_and_assert(
+            input_path=input_file,
+            output_path=output_file.resolve(),
+            assert_func=assert_file_content,
+            expected_file=EXPECTED_PYPROJECT_PROFILE_PATH / "cli_override.py",
+            extra_args=[
+                "--profile",
+                "api",
+                "--disable-timestamp",
+                "--target-python-version",
+                "3.11",
+                "--use-union-operator",
+            ],
+        )
+
+
+def test_ignore_pyproject_with_profile(tmp_path: Path) -> None:
+    """Test that --ignore-pyproject ignores --profile as well."""
+    pyproject_toml = """
+[tool.datamodel-codegen]
+snake-case-field = true
+
+[tool.datamodel-codegen.profiles.api]
+target-python-version = "3.11"
+"""
+    (tmp_path / "pyproject.toml").write_text(pyproject_toml)
+
+    input_data = """
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "properties": {
+    "firstName": {"type": "string"}
+  }
+}
+"""
+    input_file = tmp_path / "schema.json"
+    input_file.write_text(input_data)
+    output_file = tmp_path / "output.py"
+
+    with chdir(tmp_path):
+        run_main_with_args(
+            [
+                "--input",
+                str(input_file),
+                "--output",
+                str(output_file),
+                "--ignore-pyproject",
+                "--profile",
+                "api",
+                "--disable-timestamp",
+            ],
+        )
+        output_content = output_file.read_text()
+        assert "firstName" in output_content
+        assert "first_name" not in output_content
+
+
+def test_profile_without_pyproject_errors(tmp_path: Path, capsys: pytest.CaptureFixture[str]) -> None:
+    """Test that --profile without pyproject.toml raises an error."""
+    input_file = tmp_path / "schema.json"
+    input_file.write_text('{"type": "object"}')
+    output_file = tmp_path / "output.py"
+
+    with chdir(tmp_path):
+        return_code = run_main_with_args(
+            ["--input", str(input_file), "--output", str(output_file), "--profile", "api"],
+            expected_exit=Exit.ERROR,
+            capsys=capsys,
+        )
+        assert return_code == Exit.ERROR
+        captured = capsys.readouterr()
+        assert "no [tool.datamodel-codegen] section found" in captured.err.lower()
+
+
+@freeze_time("2019-07-26")
+def test_allof_with_description_generates_class_not_alias(output_file: Path) -> None:
+    """Test that allOf with description generates class definition, not alias."""
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "allof_with_description_only.yaml",
+        output_path=output_file,
+        input_file_type=None,
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_MAIN_KR_PATH / "main_allof_with_description_only" / "output.py",
+        extra_args=[
+            "--output-model-type",
+            "pydantic_v2.BaseModel",
+            "--use-schema-description",
+        ],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--use-decimal-for-multiple-of"],
+    input_schema="jsonschema/use_decimal_for_multiple_of.json",
+    cli_args=["--use-decimal-for-multiple-of"],
+    golden_output="main_kr/use_decimal_for_multiple_of/output.py",
+)
+@freeze_time("2019-07-26")
+def test_use_decimal_for_multiple_of(output_file: Path) -> None:
+    """Generate Decimal types for fields with multipleOf constraint.
+
+    The `--use-decimal-for-multiple-of` flag generates `condecimal` or `Decimal`
+    types for numeric fields that have a `multipleOf` constraint. This ensures
+    precise decimal arithmetic when validating values against the constraint.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "use_decimal_for_multiple_of.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_MAIN_KR_PATH / "use_decimal_for_multiple_of" / "output.py",
+        extra_args=["--use-decimal-for-multiple-of"],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--use-pendulum"],
+    input_schema="jsonschema/use_pendulum.json",
+    cli_args=["--use-pendulum"],
+    golden_output="main_kr/use_pendulum/output.py",
+)
+@freeze_time("2019-07-26")
+def test_use_pendulum(output_file: Path) -> None:
+    """Use pendulum types for date/time fields instead of datetime module.
+
+    The `--use-pendulum` flag generates pendulum library types (DateTime, Date,
+    Time, Duration) instead of standard datetime types. This is useful when
+    working with the pendulum library for enhanced timezone and date handling.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "use_pendulum.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_MAIN_KR_PATH / "use_pendulum" / "output.py",
+        extra_args=["--use-pendulum"],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--use-non-positive-negative-number-constrained-types"],
+    input_schema="jsonschema/use_non_positive_negative.json",
+    cli_args=["--use-non-positive-negative-number-constrained-types"],
+    golden_output="main_kr/use_non_positive_negative/output.py",
+)
+@pytest.mark.skipif(pydantic.VERSION < "2.0.0", reason="Require Pydantic version 2.0.0 or later")
+@freeze_time("2019-07-26")
+def test_use_non_positive_negative_number_constrained_types(output_file: Path) -> None:
+    """Use NonPositive/NonNegative types for number constraints.
+
+    The `--use-non-positive-negative-number-constrained-types` flag generates
+    Pydantic's NonPositiveInt, NonNegativeInt, NonPositiveFloat, and NonNegativeFloat
+    types for fields with minimum: 0 or maximum: 0 constraints, instead of using
+    conint/confloat with ge/le parameters.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "use_non_positive_negative.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_MAIN_KR_PATH / "use_non_positive_negative" / "output.py",
+        extra_args=["--use-non-positive-negative-number-constrained-types"],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--include-path-parameters"],
+    input_schema="openapi/include_path_parameters.yaml",
+    cli_args=["--include-path-parameters", "--openapi-scopes", "schemas", "paths", "parameters"],
+    golden_output="main_kr/include_path_parameters/output.py",
+)
+@freeze_time("2019-07-26")
+def test_include_path_parameters(output_file: Path) -> None:
+    """Include OpenAPI path parameters in generated parameter models.
+
+    The `--include-path-parameters` flag adds path parameters (like /users/{userId})
+    to the generated request parameter models. By default, only query parameters
+    are included. Use this with `--openapi-scopes parameters` to generate parameter
+    models that include both path and query parameters.
+    """
+    run_main_and_assert(
+        input_path=OPEN_API_DATA_PATH / "include_path_parameters.yaml",
+        output_path=output_file,
+        input_file_type="openapi",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_MAIN_KR_PATH / "include_path_parameters" / "output.py",
+        extra_args=["--include-path-parameters", "--openapi-scopes", "schemas", "paths", "parameters"],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--no-alias"],
+    input_schema="jsonschema/no_alias.json",
+    cli_args=["--no-alias"],
+    golden_output="main_kr/no_alias/with_option.py",
+    comparison_output="main_kr/no_alias/without_option.py",
+)
+@freeze_time("2019-07-26")
+def test_no_alias(output_file: Path) -> None:
+    """Disable Field alias generation for non-Python-safe property names.
+
+    The `--no-alias` flag disables automatic alias generation when JSON property
+    names contain characters invalid in Python (like hyphens). Without this flag,
+    fields are renamed to Python-safe names with `Field(alias='original-name')`.
+    With this flag, only Python-safe names are used without aliases.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "no_alias.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_MAIN_KR_PATH / "no_alias" / "with_option.py",
+        extra_args=["--no-alias"],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--custom-file-header"],
+    input_schema="jsonschema/no_alias.json",
+    cli_args=["--custom-file-header", "# Copyright 2024 MyCompany"],
+    golden_output="main_kr/custom_file_header/with_option.py",
+    comparison_output="main_kr/custom_file_header/without_option.py",
+)
+@freeze_time("2019-07-26")
+def test_custom_file_header(output_file: Path) -> None:
+    """Add custom header text to the generated file.
+
+    The `--custom-file-header` flag replaces the default "generated by datamodel-codegen"
+    header with custom text. This is useful for adding copyright notices, license
+    headers, or other metadata to generated files.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "no_alias.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_MAIN_KR_PATH / "custom_file_header" / "with_option.py",
+        extra_args=["--custom-file-header", "# Copyright 2024 MyCompany"],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--url", "--http-headers"],
+    input_schema="jsonschema/pet_simple.json",
+    cli_args=["--url", "https://api.example.com/schema.json", "--http-headers", "Authorization:Bearer token"],
+    golden_output="main_kr/url_with_headers/output.py",
+)
+@freeze_time("2019-07-26")
+def test_url_with_http_headers(mocker: MockerFixture, output_file: Path) -> None:
+    """Fetch schema from URL with custom HTTP headers.
+
+    The `--url` flag specifies a remote URL to fetch the schema from instead of
+    a local file. The `--http-headers` flag adds custom HTTP headers to the request,
+    useful for authentication (e.g., Bearer tokens) or custom API requirements.
+    Format: `HeaderName:HeaderValue`.
+    """
+    mock_response = Mock()
+    mock_response.text = JSON_SCHEMA_DATA_PATH.joinpath("pet_simple.json").read_text()
+
+    mocker.patch("httpx.get", return_value=mock_response)
+
+    return_code = main([
+        "--url",
+        "https://api.example.com/schema.json",
+        "--output",
+        str(output_file),
+        "--input-file-type",
+        "jsonschema",
+        "--http-headers",
+        "Authorization:Bearer token",
+    ])
+    assert return_code == 0
+    assert_file_content(output_file, EXPECTED_MAIN_KR_PATH / "url_with_headers" / "output.py")
+
+
+@pytest.mark.cli_doc(
+    options=["--input"],
+    input_schema="jsonschema/pet_simple.json",
+    cli_args=["--input", "pet_simple.json", "--output", "output.py"],
+    golden_output="main_kr/input_output/output.py",
+)
+@freeze_time("2019-07-26")
+def test_input_option(output_file: Path) -> None:
+    """Specify the input schema file path.
+
+    The `--input` flag specifies the path to the schema file (JSON Schema,
+    OpenAPI, GraphQL, etc.). Multiple input files can be specified to merge
+    schemas. Required unless using `--url` to fetch schema from a URL.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "pet_simple.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_MAIN_KR_PATH / "input_output" / "output.py",
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--output"],
+    input_schema="jsonschema/pet_simple.json",
+    cli_args=["--input", "pet_simple.json", "--output", "output.py"],
+    golden_output="main_kr/input_output/output.py",
+)
+@freeze_time("2019-07-26")
+def test_output_option(output_file: Path) -> None:
+    """Specify the destination path for generated Python code.
+
+    The `--output` flag specifies where to write the generated Python code.
+    It can be either a file path (single-file output) or a directory path
+    (multi-file output for modular schemas). If omitted, the generated code
+    is written to stdout.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "pet_simple.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_MAIN_KR_PATH / "input_output" / "output.py",
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--encoding"],
+    input_schema="jsonschema/encoding_test.json",
+    cli_args=["--encoding", "utf-8"],
+    golden_output="main_kr/encoding/output.py",
+)
+@freeze_time("2019-07-26")
+def test_encoding_option(output_file: Path) -> None:
+    """Specify character encoding for input and output files.
+
+    The `--encoding` flag sets the character encoding used when reading
+    the schema file and writing the generated Python code. This is useful
+    for schemas containing non-ASCII characters (e.g., Japanese, Chinese).
+    Default is utf-8.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "encoding_test.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_MAIN_KR_PATH / "encoding" / "output.py",
+        extra_args=["--encoding", "utf-8"],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--formatters"],
+    input_schema="jsonschema/pet_simple.json",
+    cli_args=["--formatters", "isort"],
+    golden_output="main_kr/formatters/output.py",
+)
+@freeze_time("2019-07-26")
+def test_formatters_option(output_file: Path) -> None:
+    """Specify code formatters to apply to generated output.
+
+    The `--formatters` flag specifies which code formatters to apply to
+    the generated Python code. Available formatters are: black, isort,
+    ruff, yapf, autopep8, autoflake. Default is [black, isort].
+    Use this to customize formatting or disable formatters entirely.
+    """
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "pet_simple.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_MAIN_KR_PATH / "formatters" / "output.py",
+        extra_args=["--formatters", "isort"],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--custom-formatters-kwargs"],
+    input_schema="jsonschema/pet_simple.json",
+    cli_args=["--custom-formatters-kwargs", "formatter_kwargs.json"],
+    golden_output="main_kr/input_output/output.py",
+)
+@freeze_time("2019-07-26")
+def test_custom_formatters_kwargs_option(output_file: Path) -> None:
+    """Pass custom arguments to custom formatters via JSON file.
+
+    The `--custom-formatters-kwargs` flag accepts a path to a JSON file containing
+    custom configuration for custom formatters (used with --custom-formatters).
+    The file should contain a JSON object mapping formatter names to their kwargs.
+
+    Note: This option is primarily used with --custom-formatters to pass
+    configuration to user-defined formatter modules.
+    """
+    # Simple test - the option is accepted. Full usage requires custom formatter module.
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "pet_simple.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_MAIN_KR_PATH / "input_output" / "output.py",
+        extra_args=["--custom-formatters-kwargs", str(DATA_PATH / "config" / "formatter_kwargs.json")],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--http-ignore-tls"],
+    input_schema="jsonschema/pet_simple.json",
+    cli_args=["--url", "https://api.example.com/schema.json", "--http-ignore-tls"],
+    golden_output="main_kr/url_with_headers/output.py",
+)
+@freeze_time("2019-07-26")
+def test_http_ignore_tls(output_file: Path) -> None:
+    """Disable TLS certificate verification for HTTPS requests.
+
+    The `--http-ignore-tls` flag disables SSL/TLS certificate verification
+    when fetching schemas from HTTPS URLs. This is useful for development
+    environments with self-signed certificates. Not recommended for production.
+    """
+    mock_response = Mock()
+    mock_response.text = JSON_SCHEMA_DATA_PATH.joinpath("pet_simple.json").read_text()
+
+    with patch("httpx.get", return_value=mock_response) as mock_get:
+        return_code = main([
+            "--url",
+            "https://api.example.com/schema.json",
+            "--output",
+            str(output_file),
+            "--input-file-type",
+            "jsonschema",
+            "--http-ignore-tls",
+        ])
+        assert return_code == 0
+        # Verify that verify=False was passed to httpx.get
+        mock_get.assert_called_once()
+        call_kwargs = mock_get.call_args[1]
+        assert call_kwargs.get("verify") is False
+
+
+@pytest.mark.cli_doc(
+    options=["--http-query-parameters"],
+    input_schema="jsonschema/pet_simple.json",
+    cli_args=["--url", "https://api.example.com/schema.json", "--http-query-parameters", "version=v2", "format=json"],
+    golden_output="main_kr/url_with_headers/output.py",
+)
+@freeze_time("2019-07-26")
+def test_http_query_parameters(output_file: Path) -> None:
+    """Add query parameters to HTTP requests for remote schemas.
+
+    The `--http-query-parameters` flag adds query parameters to HTTP requests
+    when fetching schemas from URLs. Useful for APIs that require version
+    or format parameters. Format: `key=value`. Multiple parameters can be
+    specified: `--http-query-parameters version=v2 format=json`.
+    """
+    mock_response = Mock()
+    mock_response.text = JSON_SCHEMA_DATA_PATH.joinpath("pet_simple.json").read_text()
+
+    with patch("httpx.get", return_value=mock_response) as mock_get:
+        return_code = main([
+            "--url",
+            "https://api.example.com/schema.json",
+            "--output",
+            str(output_file),
+            "--input-file-type",
+            "jsonschema",
+            "--http-query-parameters",
+            "version=v2",
+            "format=json",
+        ])
+        assert return_code == 0
+        # Verify query parameters were passed as list of tuples
+        mock_get.assert_called_once()
+        call_kwargs = mock_get.call_args[1]
+        assert "params" in call_kwargs
+        # params is a list of tuples: [("version", "v2"), ("format", "json")]
+        params = call_kwargs["params"]
+        assert ("version", "v2") in params
+        assert ("format", "json") in params
+
+
+@pytest.mark.cli_doc(
+    options=["--ignore-pyproject"],
+    input_schema="jsonschema/ignore_pyproject_example.json",
+    cli_args=["--ignore-pyproject"],
+    golden_output="main_kr/ignore_pyproject/output.py",
+    comparison_output="main_kr/ignore_pyproject/without_option.py",
+)
+@freeze_time("2019-07-26")
+def test_ignore_pyproject_cli_doc(output_file: Path, tmp_path: Path) -> None:
+    """Ignore pyproject.toml configuration file.
+
+    The `--ignore-pyproject` flag tells datamodel-codegen to ignore any
+    [tool.datamodel-codegen] configuration in pyproject.toml. This is useful
+    when you want to override project defaults with CLI arguments, or when
+    testing without project configuration.
+    """
+    # Create a pyproject.toml with snake-case-field to demonstrate ignoring
+    pyproject_toml = """
+[tool.datamodel-codegen]
+snake-case-field = true
+"""
+    (tmp_path / "pyproject.toml").write_text(pyproject_toml)
+
+    input_data = """
+{
+  "$schema": "http://json-schema.org/draft-07/schema#",
+  "type": "object",
+  "properties": {
+    "firstName": {"type": "string"},
+    "lastName": {"type": "string"}
+  }
+}
+"""
+    input_file = tmp_path / "schema.json"
+    input_file.write_text(input_data)
+
+    with chdir(tmp_path):
+        run_main_and_assert(
+            input_path=input_file,
+            output_path=output_file.resolve(),
+            assert_func=assert_file_content,
+            expected_file=EXPECTED_MAIN_KR_PATH / "ignore_pyproject" / "output.py",
+            extra_args=["--ignore-pyproject", "--disable-timestamp"],
+        )
+
+
+@pytest.mark.cli_doc(
+    options=["--shared-module-name"],
+    input_schema="jsonschema/pet_simple.json",
+    cli_args=["--shared-module-name", "my_shared"],
+    golden_output="main_kr/input_output/output.py",
+)
+@freeze_time("2019-07-26")
+def test_shared_module_name(output_file: Path) -> None:
+    """Customize the name of the shared module for deduplicated models.
+
+    The `--shared-module-name` flag sets the name of the shared module created
+    when using `--reuse-model` with `--reuse-scope=tree`. This module contains
+    deduplicated models that are referenced from multiple files. Default is
+    `shared`. Use this if your schema already has a file named `shared`.
+
+    Note: This option only affects modular output with tree-level model reuse.
+    """
+    # Simple test - the option is accepted but only affects modular output with reuse
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "pet_simple.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_MAIN_KR_PATH / "input_output" / "output.py",
+        extra_args=["--shared-module-name", "my_shared"],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--use-exact-imports"],
+    input_schema="jsonschema/pet_simple.json",
+    cli_args=["--use-exact-imports"],
+    golden_output="main_kr/input_output/output.py",
+)
+@freeze_time("2019-07-26")
+def test_use_exact_imports(output_file: Path) -> None:
+    """Import exact types instead of modules.
+
+    The `--use-exact-imports` flag changes import style from module imports
+    to exact type imports. For example, instead of `from . import foo` then
+    `foo.Bar`, it generates `from .foo import Bar`. This can make the generated
+    code more explicit and easier to read.
+
+    Note: This option primarily affects modular output where imports between
+    modules are generated. For single-file output, the difference is minimal.
+    """
+    # Simple test - the option is accepted and works for single file output
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "pet_simple.json",
+        output_path=output_file,
+        input_file_type="jsonschema",
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_MAIN_KR_PATH / "input_output" / "output.py",
+        extra_args=["--use-exact-imports"],
+    )
+
+
+@pytest.mark.cli_doc(
+    options=["--target-python-version"],
+    input_schema="jsonschema/person.json",
+    cli_args=["--target-python-version", "3.9", "--use-standard-collections"],
+    version_outputs={
+        "3.9": "main_kr/target_python_version/py39.py",
+        "3.10": "main_kr/target_python_version/py310.py",
+    },
+    primary=True,
+)
+@freeze_time("2019-07-26")
+def test_target_python_version_outputs(output_file: Path) -> None:
+    """Target Python version for generated code syntax and imports.
+
+    The `--target-python-version` flag controls Python version-specific syntax:
+
+    - **Python 3.9**: Uses `Optional[X]` for optional types, `typing.Dict/List`
+    - **Python 3.10+**: Can use `X | None` union operator, built-in `dict/list`
+
+    This affects import statements and type annotation syntax in generated code.
+    """
+    # Test with Python 3.9 style
+    run_main_and_assert(
+        input_path=JSON_SCHEMA_DATA_PATH / "person.json",
+        output_path=output_file,
+        assert_func=assert_file_content,
+        expected_file=EXPECTED_MAIN_KR_PATH / "target_python_version" / "py39.py",
+        extra_args=["--target-python-version", "3.9", "--use-standard-collections"],
+    )
diff -pruN 0.26.4-3/tests/test_reference.py 0.45.0-1/tests/test_reference.py
--- 0.26.4-3/tests/test_reference.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/test_reference.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,358 @@
+"""Tests for reference resolution functionality."""
+
+from __future__ import annotations
+
+from pathlib import PurePosixPath, PureWindowsPath
+
+import pytest
+
+from datamodel_code_generator.http import join_url
+from datamodel_code_generator.reference import ModelResolver, get_relative_path, is_url
+
+
+@pytest.mark.parametrize(
+    ("base_path", "target_path", "expected"),
+    [
+        ("/a/b", "/a/b", "."),
+        ("/a/b", "/a/b/c", "c"),
+        ("/a/b", "/a/b/c/d", "c/d"),
+        ("/a/b/c", "/a/b", ".."),
+        ("/a/b/c/d", "/a/b", "../.."),
+        ("/a/b/c/d", "/a", "../../.."),
+        ("/a/b/c/d", "/a/x/y/z", "../../../x/y/z"),
+        ("/a/b/c/d", "a/x/y/z", "a/x/y/z"),
+        ("/a/b/c/d", "/a/b/e/d", "../../e/d"),
+    ],
+)
+def test_get_relative_path_posix(base_path: str, target_path: str, expected: str) -> None:
+    """Test get_relative_path function on POSIX paths."""
+    assert PurePosixPath(get_relative_path(PurePosixPath(base_path), PurePosixPath(target_path))) == PurePosixPath(
+        expected
+    )
+
+
+@pytest.mark.parametrize(
+    ("base_path", "target_path", "expected"),
+    [
+        ("c:/a/b", "c:/a/b", "."),
+        ("c:/a/b", "c:/a/b/c", "c"),
+        ("c:/a/b", "c:/a/b/c/d", "c/d"),
+        ("c:/a/b/c", "c:/a/b", ".."),
+        ("c:/a/b/c/d", "c:/a/b", "../.."),
+        ("c:/a/b/c/d", "c:/a", "../../.."),
+        ("c:/a/b/c/d", "c:/a/x/y/z", "../../../x/y/z"),
+        ("c:/a/b/c/d", "a/x/y/z", "a/x/y/z"),
+        ("c:/a/b/c/d", "c:/a/b/e/d", "../../e/d"),
+    ],
+)
+def test_get_relative_path_windows(base_path: str, target_path: str, expected: str) -> None:
+    """Test get_relative_path function on Windows paths."""
+    assert PureWindowsPath(
+        get_relative_path(PureWindowsPath(base_path), PureWindowsPath(target_path))
+    ) == PureWindowsPath(expected)
+
+
+def test_model_resolver_add_ref_with_hash() -> None:
+    """Test adding reference with URL fragment."""
+    model_resolver = ModelResolver()
+    reference = model_resolver.add_ref("https://json-schema.org/draft/2020-12/meta/core#")
+    assert reference.original_name == "core"
+
+
+def test_model_resolver_add_ref_without_hash() -> None:
+    """Test adding reference without URL fragment."""
+    model_resolver = ModelResolver()
+    reference = model_resolver.add_ref("meta/core")
+    assert reference.original_name == "core"
+
+
+def test_model_resolver_add_ref_unevaluated() -> None:
+    """Test adding reference for unevaluated schema."""
+    model_resolver = ModelResolver()
+    reference = model_resolver.add_ref("meta/unevaluated")
+    assert reference.original_name == "unevaluated"
+
+
+def test_base_url_context_sets_url_when_base_url_already_set() -> None:
+    """When _base_url is already set, base_url_context should switch to new URL."""
+    resolver = ModelResolver(base_url="https://example.com/original.json")
+    assert resolver.base_url == "https://example.com/original.json"
+
+    with resolver.base_url_context("https://example.com/new.json"):
+        assert resolver.base_url == "https://example.com/new.json"
+
+    # Should restore original
+    assert resolver.base_url == "https://example.com/original.json"
+
+
+def test_base_url_context_sets_url_when_new_value_is_url() -> None:
+    """When _base_url is None but new value is a URL, should set base_url."""
+    resolver = ModelResolver()
+    assert resolver.base_url is None
+
+    with resolver.base_url_context("https://example.com/schema.json"):
+        assert resolver.base_url == "https://example.com/schema.json"
+
+    # Should restore to None
+    assert resolver.base_url is None
+
+
+def test_base_url_context_noop_when_new_value_is_not_url() -> None:
+    """When _base_url is None and new value is not a URL, should do nothing."""
+    resolver = ModelResolver()
+    assert resolver.base_url is None
+
+    with resolver.base_url_context("../relative/path.json"):
+        # Should remain None because the value is not a URL
+        assert resolver.base_url is None
+
+    assert resolver.base_url is None
+
+
+def test_base_url_context_nested() -> None:
+    """Nested base_url_context should properly restore values."""
+    resolver = ModelResolver(base_url="https://example.com/level0.json")
+
+    with resolver.base_url_context("https://example.com/level1.json"):
+        assert resolver.base_url == "https://example.com/level1.json"
+
+        with resolver.base_url_context("https://example.com/level2.json"):
+            assert resolver.base_url == "https://example.com/level2.json"
+
+        assert resolver.base_url == "https://example.com/level1.json"
+
+    assert resolver.base_url == "https://example.com/level0.json"
+
+
+def test_resolve_ref_with_base_url_does_not_prepend_root_id_base_path() -> None:
+    """When base_url is set, root_id_base_path should not be prepended to refs."""
+    resolver = ModelResolver(base_url="https://example.com/schemas/main.json")
+    resolver.set_root_id("https://example.com/schemas/main.json")
+
+    # Resolve a relative ref
+    result = resolver.resolve_ref("../other/schema.json")
+
+    # Should resolve via join_url, not prepend root_id_base_path
+    assert result == "https://example.com/other/schema.json#"
+    # Should NOT be like "https://example.com/schemas/../other/schema.json#"
+
+
+def test_resolve_ref_with_base_url_nested_relative_refs() -> None:
+    """Nested relative refs should resolve correctly when base_url is set."""
+    resolver = ModelResolver(base_url="https://example.com/a/b/c/main.json")
+
+    # Resolve a deeply nested relative ref
+    result = resolver.resolve_ref("../../other/schema.json")
+
+    assert result == "https://example.com/a/other/schema.json#"
+
+
+def test_resolve_ref_with_base_url_context_switch() -> None:
+    """Relative refs should resolve correctly after base_url context switch."""
+    resolver = ModelResolver(base_url="https://example.com/schemas/person.json")
+
+    # Switch context to a different file
+    with resolver.base_url_context("https://example.com/schemas/definitions/pet.json"):
+        # Resolve a relative ref from the new context
+        result = resolver.resolve_ref("../common/types.json")
+
+        assert result == "https://example.com/schemas/common/types.json#"
+
+
+def test_resolve_ref_local_fragment_with_base_url() -> None:
+    """Local fragment refs should resolve to full URL when base_url is set."""
+    resolver = ModelResolver(base_url="https://example.com/schemas/main.json")
+
+    result = resolver.resolve_ref("#/definitions/Foo")
+
+    # When base_url is set, local fragments are resolved to full URL
+    assert result == "https://example.com/schemas/main.json#/definitions/Foo"
+
+
+@pytest.mark.parametrize(
+    ("ref", "expected"),
+    [
+        # HTTP/HTTPS URLs
+        ("https://example.com/schema.json", True),
+        ("http://example.com/schema.json", True),
+        ("https://example.com/path/to/schema.json", True),
+        # file:// URLs - recognized and handled via filesystem
+        ("file:///home/user/schema.json", True),
+        ("file:///C:/path/to/schema.json", True),
+        ("file://server/share/schema.json", True),
+        # file:/ (single slash) - NOT recognized as valid file URL
+        ("file:/home/user/schema.json", False),
+        # Other URL schemes - NOT recognized
+        ("ftp://example.com/schema.json", False),
+        # Relative paths (not URLs)
+        ("../relative/path.json", False),
+        ("relative/path.json", False),
+        # Local fragments (not URLs)
+        ("#/definitions/Foo", False),
+        ("#", False),
+        # Absolute paths (not URLs)
+        ("/absolute/path.json", False),
+        # Windows paths (not URLs)
+        ("c:/windows/path.json", False),
+        ("d:/path/to/file.json", False),
+    ],
+)
+def test_is_url(ref: str, expected: bool) -> None:
+    """Test is_url correctly identifies HTTP(S) and file:// URLs."""
+    assert is_url(ref) == expected
+
+
+def test_resolve_ref_with_root_id_differs_from_base_url() -> None:
+    """When $id differs from fetch URL, refs should resolve against $id."""
+    # Scenario: Schema fetched from CDN but has canonical $id
+    resolver = ModelResolver(base_url="https://cdn.example.com/latest/schema.json")
+    resolver.set_root_id("https://example.com/v1/schema.json")
+
+    result = resolver.resolve_ref("../common/types.json")
+
+    assert result == "https://example.com/common/types.json#"
+
+
+@pytest.mark.parametrize(
+    ("base_url", "ref", "expected"),
+    [
+        # file:// URL joining - relative refs
+        ("file:///home/user/schemas/main.json", "../common/types.json", "file:///home/user/common/types.json"),
+        ("file:///home/user/schemas/main.json", "other.json", "file:///home/user/schemas/other.json"),
+        ("file:///home/user/schemas/main.json", "./sub/schema.json", "file:///home/user/schemas/sub/schema.json"),
+        # file:// URL joining - absolute file:// refs
+        ("file:///home/user/schemas/main.json", "file:///other/schema.json", "file:///other/schema.json"),
+        # file:// URL joining - absolute path refs (starts with /)
+        ("file:///home/user/schemas/main.json", "/absolute/path.json", "file:///absolute/path.json"),
+        ("file://server/share/main.json", "/absolute/path.json", "file://server/absolute/path.json"),
+        # Windows-style file:// URLs
+        ("file:///C:/schemas/main.json", "../common/types.json", "file:///C:/common/types.json"),
+        # UNC file:// URLs
+        ("file://server/share/main.json", "../common/types.json", "file://server/share/common/types.json"),
+        ("file://server/share/main.json", "child.json", "file://server/share/child.json"),
+        # Fragment handling
+        (
+            "file:///home/user/schemas/main.json",
+            "other.json#/definitions/Foo",
+            "file:///home/user/schemas/other.json#/definitions/Foo",
+        ),
+        (
+            "file:///home/user/schemas/main.json",
+            "#/definitions/Bar",
+            "file:///home/user/schemas/main.json#/definitions/Bar",
+        ),
+        # Multiple .. traversal - stops at root for non-UNC
+        ("file:///a/b/main.json", "../../../other.json", "file:///other.json"),
+        # Multiple .. traversal - stops at share level for UNC (min_depth=1)
+        ("file://server/share/a/b/main.json", "../../../../other.json", "file://server/share/other.json"),
+        # Empty and dot segments
+        ("file:///home/user/schemas/main.json", "./", "file:///home/user/schemas/"),
+        ("file:///home/user/schemas/main.json", "a//b/./c.json", "file:///home/user/schemas/a/b/c.json"),
+        # Fragment-only ref without fragment content (just #)
+        ("file:///home/user/schemas/main.json", "#", "file:///home/user/schemas/main.json#"),
+        # Empty ref (keeps base URL unchanged)
+        ("file:///home/user/schemas/main.json", "", "file:///home/user/schemas/main.json"),
+        # Root directory base URL (triggers empty base_segments branch)
+        ("file:///", "schema.json", "file:///schema.json"),
+        ("file:///main.json", "../other.json", "file:///other.json"),
+    ],
+)
+def test_join_url_file_scheme(base_url: str, ref: str, expected: str) -> None:
+    """Test join_url correctly handles file:// URLs."""
+    assert join_url(base_url, ref) == expected
+
+
+def test_url_ref_matches_local_id_no_fragment() -> None:
+    """URL $ref matching a local $id should resolve to the $id's path (Issue #1747)."""
+    resolver = ModelResolver()
+    resolver.set_current_root([])
+    resolver.add_id("https://schemas.example.org/child", ["#", "$defs", "child"])
+
+    result = resolver.resolve_ref("https://schemas.example.org/child#")
+
+    assert result == "#/$defs/child"
+
+
+def test_url_ref_matches_local_id_with_fragment() -> None:
+    """URL $ref with fragment should combine $id path with fragment (Issue #1747)."""
+    resolver = ModelResolver()
+    resolver.set_current_root([])
+    resolver.add_id("https://schemas.example.org/child", ["#", "$defs", "child"])
+
+    result = resolver.resolve_ref("https://schemas.example.org/child#/properties/name")
+
+    assert result == "#/$defs/child/properties/name"
+
+
+def test_url_ref_no_matching_local_id() -> None:
+    """URL $ref not matching any local $id should remain as URL (Issue #1747)."""
+    resolver = ModelResolver()
+    resolver.set_current_root([])
+
+    result = resolver.resolve_ref("https://schemas.example.org/other#")
+
+    assert result == "https://schemas.example.org/other#"
+
+
+def test_url_ref_matches_local_id_nested_fragment() -> None:
+    """URL $ref with deeply nested fragment should resolve correctly (Issue #1747)."""
+    resolver = ModelResolver()
+    resolver.set_current_root([])
+    resolver.add_id("https://example.org/types", ["#", "$defs", "types"])
+
+    result = resolver.resolve_ref("https://example.org/types#/definitions/Address/properties/city")
+
+    assert result == "#/$defs/types/definitions/Address/properties/city"
+
+
+def test_url_ref_matches_local_id_with_base_url() -> None:
+    """URL $ref matching local $id should resolve via $id mapping even when base_url is set (Issue #1747)."""
+    resolver = ModelResolver(base_url="https://cdn.example.com/schemas/main.json")
+    resolver.set_current_root([])
+    resolver.add_id("https://schemas.example.org/child", ["#", "$defs", "child"])
+
+    result = resolver.resolve_ref("https://schemas.example.org/child#")
+
+    assert result == "https://cdn.example.com/schemas/main.json#/$defs/child"
+
+
+def test_url_ref_matches_local_id_preserves_empty_json_pointer_token() -> None:
+    """URL $ref fragment with empty JSON Pointer token (//) should be preserved (Issue #1747)."""
+    resolver = ModelResolver()
+    resolver.set_current_root([])
+    resolver.add_id("https://example.org/types", ["#", "$defs", "types"])
+
+    result = resolver.resolve_ref("https://example.org/types#/items//child")
+
+    assert result == "#/$defs/types/items//child"
+
+
+def test_resolve_ref_local_fragment_with_base_url_and_current_root() -> None:
+    """Local fragment refs should resolve to current_root when it's set, even with base_url (Issue #1798)."""
+    resolver = ModelResolver(base_url="https://raw.githubusercontent.com/user/repo/schema.json")
+    resolver.set_root_id("https://cveproject.github.io/schema/schema.json")
+    resolver.set_current_root(["https://raw.githubusercontent.com/user/repo/schema.json"])
+
+    result = resolver.resolve_ref("#/definitions/Foo")
+
+    assert result == "https://raw.githubusercontent.com/user/repo/schema.json#/definitions/Foo"
+
+
+def test_resolve_ref_local_fragment_with_different_host_base_url_and_root_id() -> None:
+    """Local fragment refs should resolve correctly when base_url and root_id have different hosts (Issue #1798)."""
+    resolver = ModelResolver(base_url="https://raw.githubusercontent.com/user/repo/schema.json")
+    resolver.set_root_id("https://cveproject.github.io/schema/schema.json")
+    resolver.set_current_root(["https://raw.githubusercontent.com/user/repo/schema.json"])
+
+    result = resolver.resolve_ref("#/definitions/product/properties/url")
+
+    assert result == "https://raw.githubusercontent.com/user/repo/schema.json#/definitions/product/properties/url"
+
+
+def test_resolve_ref_local_fragment_without_current_root_falls_back_to_url() -> None:
+    """Local fragment refs without current_root should fall back to URL resolution (Issue #1798)."""
+    resolver = ModelResolver(base_url="https://example.com/schemas/main.json")
+
+    result = resolver.resolve_ref("#/definitions/Foo")
+
+    assert result == "https://example.com/schemas/main.json#/definitions/Foo"
diff -pruN 0.26.4-3/tests/test_resolver.py 0.45.0-1/tests/test_resolver.py
--- 0.26.4-3/tests/test_resolver.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/test_resolver.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,138 @@
+"""Tests for field name resolver functionality."""
+
+from __future__ import annotations
+
+import pytest
+
+from datamodel_code_generator.reference import FieldNameResolver
+
+
+@pytest.mark.parametrize(
+    ("name", "expected_resolved"),
+    [
+        ("3a", "field_3a"),
+        ("$in", "field_in"),
+        ("field", "field"),
+    ],
+)
+def test_get_valid_field_name(name: str, expected_resolved: str) -> None:
+    """Test field name resolution to valid Python identifiers."""
+    resolver = FieldNameResolver()
+    assert resolver.get_valid_name(name) == expected_resolved
+
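+# The normalization rule implied by the cases above (an inference from the
+# test data, not a statement of the resolver's full algorithm): characters
+# that are invalid in a Python identifier are stripped, and names that would
+# start with a digit or collide with a keyword get a "field_" prefix, e.g.
+#
+#     FieldNameResolver().get_valid_name("3a")   # -> "field_3a"
+#     FieldNameResolver().get_valid_name("$in")  # -> "field_in" ("in" is a keyword)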
+
+def test_hierarchical_flat_alias() -> None:
+    """Test traditional flat alias resolution."""
+    resolver = FieldNameResolver(aliases={"name": "name_alias"})
+    field_name, alias = resolver.get_valid_field_name_and_alias("name")
+    assert field_name == "name_alias"
+    assert alias == "name"
+
+
+def test_hierarchical_scoped_alias() -> None:
+    """Test scoped alias resolution (ClassName.field)."""
+    resolver = FieldNameResolver(
+        aliases={
+            "User.name": "user_name",
+            "Address.name": "address_name",
+            "name": "default_name",
+        }
+    )
+
+    field_name, alias = resolver.get_valid_field_name_and_alias("name", class_name="User")
+    assert field_name == "user_name"
+    assert alias == "name"
+
+    field_name, alias = resolver.get_valid_field_name_and_alias("name", class_name="Address")
+    assert field_name == "address_name"
+    assert alias == "name"
+
+    field_name, alias = resolver.get_valid_field_name_and_alias("name", class_name="Other")
+    assert field_name == "default_name"
+    assert alias == "name"
+
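+# Lookup order suggested by the scoped-alias tests (hedged; the exact key
+# format is an assumption based on the aliases used above): a scoped key of
+# the form f"{class_name}.{field_name}" is consulted first, and the bare
+# field_name key serves as the fallback, e.g.
+#
+#     aliases = {"User.name": "user_name", "name": "default_name"}
+#     # class_name="User"  -> "user_name"
+#     # class_name="Other" -> "default_name" (falls back to the flat key)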
+
+def test_hierarchical_alias_priority() -> None:
+    """Test that scoped aliases have priority over flat aliases."""
+    resolver = FieldNameResolver(
+        aliases={
+            "User.name": "scoped_name",
+            "name": "flat_name",
+        }
+    )
+
+    field_name, _ = resolver.get_valid_field_name_and_alias("name", class_name="User")
+    assert field_name == "scoped_name"
+
+    field_name, _ = resolver.get_valid_field_name_and_alias("name", class_name="Other")
+    assert field_name == "flat_name"
+
+
+def test_hierarchical_class_name_provided_but_no_scoped_aliases() -> None:
+    """Test when class_name is provided but no scoped aliases are configured."""
+    resolver = FieldNameResolver(aliases={"name": "name_alias"})
+    field_name, alias = resolver.get_valid_field_name_and_alias("name", class_name="User")
+    assert field_name == "name_alias"
+    assert alias == "name"
+
+
+def test_hierarchical_scoped_alias_not_matching() -> None:
+    """Test when scoped alias exists but doesn't match current class."""
+    resolver = FieldNameResolver(
+        aliases={
+            "Other.name": "other_name",
+            "name": "default_name",
+        }
+    )
+    field_name, alias = resolver.get_valid_field_name_and_alias("name", class_name="User")
+    assert field_name == "default_name"
+    assert alias == "name"
+
+
+def test_hierarchical_no_alias_match() -> None:
+    """Test that unmatched fields return valid name without alias."""
+    resolver = FieldNameResolver(aliases={"other": "other_alias"})
+    field_name, alias = resolver.get_valid_field_name_and_alias("name")
+    assert field_name == "name"
+    assert alias is None
+
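+# Return-value contract implied by the tests so far (hedged, since only these
+# cases are covered): get_valid_field_name_and_alias() yields a pair of
+# (resolved_field_name, original_name), where the second element echoes the
+# schema name when an alias mapping applied and is None when nothing matched.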
+
+def test_hierarchical_backward_compatibility() -> None:
+    """Test that existing flat alias behavior is preserved."""
+    resolver = FieldNameResolver(aliases={"name": "name_", "id": "id_"})
+    field_name, alias = resolver.get_valid_field_name_and_alias("name")
+    assert field_name == "name_"
+    assert alias == "name"
+
+    field_name, alias = resolver.get_valid_field_name_and_alias("id")
+    assert field_name == "id_"
+    assert alias == "id"
+
+
+def test_hierarchical_dotted_field_name_alias() -> None:
+    """Test that field names containing dots can be aliased (backward compat)."""
+    resolver = FieldNameResolver(aliases={"filter.name": "filter_name_alias"})
+    field_name, alias = resolver.get_valid_field_name_and_alias("filter.name")
+    assert field_name == "filter_name_alias"
+    assert alias == "filter.name"
+
+
+def test_hierarchical_dotted_field_name_without_class_name() -> None:
+    """Test dotted field name alias works without class_name parameter."""
+    resolver = FieldNameResolver(
+        aliases={
+            "a.b": "a_b_alias",
+            "User.name": "user_name",
+        }
+    )
+    field_name, alias = resolver.get_valid_field_name_and_alias("a.b")
+    assert field_name == "a_b_alias"
+    assert alias == "a.b"
+
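+# Taken together, the two dotted-name tests pin the literal interpretation
+# (an inference from the assertions, not a documented contract): a field name
+# that itself contains a dot still matches its flat alias key when no
+# class_name is supplied, and the dotted alias key is not misread as a
+# ClassName.field scope:
+#
+#     aliases = {"a.b": "a_b_alias", "User.name": "user_name"}
+#     # get_valid_field_name_and_alias("a.b") -> ("a_b_alias", "a.b")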
+
+def test_hierarchical_path_parameter_backward_compatibility() -> None:
+    """Test that path parameter is accepted but ignored."""
+    resolver = FieldNameResolver(aliases={"name": "name_alias"})
+    field_name, alias = resolver.get_valid_field_name_and_alias("name", path=["root", "properties", "name"])
+    assert field_name == "name_alias"
+    assert alias == "name"
diff -pruN 0.26.4-3/tests/test_types.py 0.45.0-1/tests/test_types.py
--- 0.26.4-3/tests/test_types.py	1970-01-01 00:00:00.000000000 +0000
+++ 0.45.0-1/tests/test_types.py	2025-12-19 19:37:31.000000000 +0000
@@ -0,0 +1,135 @@
+"""Tests for type manipulation utilities."""
+
+from __future__ import annotations
+
+import pytest
+
+from datamodel_code_generator.types import _remove_none_from_union, get_optional_type
+
+
+@pytest.mark.parametrize(
+    ("input_", "use_union_operator", "expected"),
+    [
+        ("List[str]", False, "Optional[List[str]]"),
+        ("List[str, int, float]", False, "Optional[List[str, int, float]]"),
+        ("List[str, int, None]", False, "Optional[List[str, int, None]]"),
+        ("Union[str]", False, "Optional[str]"),
+        ("Union[str, int, float]", False, "Optional[Union[str, int, float]]"),
+        ("Union[str, int, None]", False, "Optional[Union[str, int]]"),
+        ("Union[str, int, None, None]", False, "Optional[Union[str, int]]"),
+        (
+            "Union[str, int, List[str, int, None], None]",
+            False,
+            "Optional[Union[str, int, List[str, int, None]]]",
+        ),
+        (
+            "Union[str, int, List[str, Dict[int, str | None]], None]",
+            False,
+            "Optional[Union[str, int, List[str, Dict[int, str | None]]]]",
+        ),
+        ("List[str]", True, "List[str] | None"),
+        ("List[str | int | float]", True, "List[str | int | float] | None"),
+        ("List[str | int | None]", True, "List[str | int | None] | None"),
+        ("str", True, "str | None"),
+        ("str | int | float", True, "str | int | float | None"),
+        ("str | int | None", True, "str | int | None"),
+        ("str | int | None | None", True, "str | int | None"),
+        (
+            "str | int | List[str | Dict[int | Union[str | None]]] | None",
+            True,
+            "str | int | List[str | Dict[int | Union[str | None]]] | None",
+        ),
+    ],
+)
+def test_get_optional_type(input_: str, use_union_operator: bool, expected: str) -> None:
+    """Test get_optional_type function with various type strings."""
+    assert get_optional_type(input_, use_union_operator) == expected
+
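+# What the cases above collectively pin down (a reading of the expectations,
+# not of the function body): get_optional_type() wraps the input in
+# Optional[...] or appends "| None" depending on use_union_operator, first
+# dropping a redundant None from an existing top-level union so the result is
+# not doubly optional, e.g.
+#
+#     get_optional_type("Union[str, int, None]", False)  # -> "Optional[Union[str, int]]"
+#     get_optional_type("str | int | None", True)        # -> "str | int | None"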
+
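+# The constr(...) cases below are the interesting ones: a naive split on "|"
+# or "," would tear apart regex patterns such as pattern='0|1' or r'^a,b$'.
+# The expectations imply that _remove_none_from_union strips None only from
+# union contexts -- flattening nested Union[...] members into the parent while
+# leaving None inside non-union generics like List[...] untouched -- and never
+# treats separators inside constr(...) patterns as union delimiters. (That
+# scoping is inferred from the test data; the parsing strategy itself is not
+# visible here.)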
+@pytest.mark.parametrize(
+    ("type_str", "use_union_operator", "expected"),
+    [
+        # Traditional Union syntax
+        ("Union[str, None]", False, "str"),
+        ("Union[str, int, None]", False, "Union[str, int]"),
+        ("Union[None, str]", False, "str"),
+        ("Union[None]", False, "None"),
+        ("Union[None, None]", False, "None"),
+        ("Union[Union[str, None], int]", False, "Union[str, int]"),
+        # Union for constraint strings with pattern or regex
+        (
+            "Union[constr(pattern=r'^a,b$'), None]",
+            False,
+            "constr(pattern=r'^a,b$')",
+        ),
+        (
+            "Union[constr(regex=r'^a,b$'), None]",
+            False,
+            "constr(regex=r'^a,b$')",
+        ),
+        (
+            "Union[constr(pattern=r'^\\d+,\\w+$'), None]",
+            False,
+            "constr(pattern=r'^\\d+,\\w+$')",
+        ),
+        (
+            "Union[constr(regex=r'^\\d+,\\w+$'), None]",
+            False,
+            "constr(regex=r'^\\d+,\\w+$')",
+        ),
+        # Union operator syntax
+        ("str | None", True, "str"),
+        ("int | str | None", True, "int | str"),
+        ("None | str", True, "str"),
+        ("None | None", True, "None"),
+        ("constr(pattern='0|1') | None", True, "constr(pattern='0|1')"),
+        ("constr(pattern='0  |1') | int | None", True, "constr(pattern='0  |1') | int"),
+        # Complex nested types - traditional syntax
+        ("Union[str, int] | None", True, "Union[str, int]"),
+        (
+            "Optional[List[Dict[str, Any]]] | None",
+            True,
+            "Optional[List[Dict[str, Any]]]",
+        ),
+        # Union for constraint strings with pattern or regex on nested types
+        (
+            "Union[constr(pattern=r'\\['), Union[str, None], int]",
+            False,
+            "Union[constr(pattern=r'\\['), str, int]",
+        ),
+        (
+            "Union[constr(regex=r'\\['), Union[str, None], int]",
+            False,
+            "Union[constr(regex=r'\\['), str, int]",
+        ),
+        # Complex nested types - union operator syntax
+        ("List[str | None] | None", True, "List[str | None]"),
+        (
+            "List[constr(pattern='0|1') | None] | None",
+            True,
+            "List[constr(pattern='0|1') | None]",
+        ),
+        (
+            "List[constr(pattern='0 | 1') | None] | None",
+            True,
+            "List[constr(pattern='0 | 1') | None]",
+        ),
+        (
+            "List[constr(pattern='0  | 1') | None] | None",
+            True,
+            "List[constr(pattern='0  | 1') | None]",
+        ),
+        ("Dict[str, int] | None | List[str]", True, "Dict[str, int] | List[str]"),
+        # Edge cases that test the fixed regex pattern issue
+        ("List[str] | None", True, "List[str]"),
+        ("Dict[str, int] | None", True, "Dict[str, int]"),
+        ("Tuple[int, ...] | None", True, "Tuple[int, ...]"),
+        ("Callable[[int], str] | None", True, "Callable[[int], str]"),
+        # Non-union types (should be returned as-is)
+        ("str", False, "str"),
+        ("List[str]", False, "List[str]"),
+    ],
+)
+def test_remove_none_from_union(type_str: str, use_union_operator: bool, expected: str) -> None:
+    """Test _remove_none_from_union function with various type strings."""
+    assert _remove_none_from_union(type_str, use_union_operator=use_union_operator) == expected
